Implementing SEO Data Transfer (Meta Tags, Redirects) During Migration
SEO data is a critical part of any migration. Losing search rankings after a move can take months to recover from, so correct meta tag transfer and redirect configuration minimize those losses.
What SEO Data Includes
- Meta title and meta description for each page
- Open Graph tags (og:title, og:description, og:image)
- Canonical URL
- Robots directives (noindex, nofollow)
- Image alt texts
- Schema.org markup
- Hreflang for multilingual sites
- Breadcrumb markup
- Existing redirects
Export SEO Data from WordPress (Yoast)
import mysql.connector
def export_yoast_seo_data(wp_db):
    """Export Yoast SEO metadata for every published post and page.

    Args:
        wp_db: an open MySQL connection to the WordPress database
            (e.g. from ``mysql.connector.connect``).

    Returns:
        list[dict]: one row per post/page with keys ``ID``, ``slug``,
        ``post_type``, ``seo_title``, ``seo_description``, ``og_title``,
        ``og_description``, ``robots`` and ``canonical``. Missing Yoast
        fields come back as NULL/None; ``seo_title`` falls back to the
        post title via COALESCE.
    """
    cursor = wp_db.cursor(dictionary=True)
    # BUG FIX: the original query selected yoast_og_title/yoast_og_desc/
    # yoast_robots/yoast_canonical columns without joining those aliases,
    # which makes MySQL fail with "Unknown column". Each Yoast field lives
    # in its own wp_postmeta row, so each needs its own LEFT JOIN.
    cursor.execute("""
        SELECT
            p.ID,
            p.post_name as slug,
            p.post_type,
            COALESCE(yoast_title.meta_value, p.post_title) as seo_title,
            yoast_desc.meta_value as seo_description,
            yoast_og_title.meta_value as og_title,
            yoast_og_desc.meta_value as og_description,
            yoast_robots.meta_value as robots,
            yoast_canonical.meta_value as canonical
        FROM wp_posts p
        LEFT JOIN wp_postmeta yoast_title ON p.ID = yoast_title.post_id
            AND yoast_title.meta_key = '_yoast_wpseo_title'
        LEFT JOIN wp_postmeta yoast_desc ON p.ID = yoast_desc.post_id
            AND yoast_desc.meta_key = '_yoast_wpseo_metadesc'
        LEFT JOIN wp_postmeta yoast_og_title ON p.ID = yoast_og_title.post_id
            AND yoast_og_title.meta_key = '_yoast_wpseo_opengraph-title'
        LEFT JOIN wp_postmeta yoast_og_desc ON p.ID = yoast_og_desc.post_id
            AND yoast_og_desc.meta_key = '_yoast_wpseo_opengraph-description'
        LEFT JOIN wp_postmeta yoast_robots ON p.ID = yoast_robots.post_id
            AND yoast_robots.meta_key = '_yoast_wpseo_meta-robots-noindex'
        LEFT JOIN wp_postmeta yoast_canonical ON p.ID = yoast_canonical.post_id
            AND yoast_canonical.meta_key = '_yoast_wpseo_canonical'
        WHERE p.post_status = 'publish'
        AND p.post_type IN ('post', 'page')
    """)
    return cursor.fetchall()
Import SEO Data into New CMS
def import_seo_to_strapi(strapi_api, pages_with_seo):
for page in pages_with_seo:
# Find corresponding page in new CMS
result = strapi_api.get(f"/pages?filters[legacy_id][$eq]={page['ID']}")
if not result['data']:
continue
new_id = result['data'][0]['id']
strapi_api.put(f"/pages/{new_id}", {
'data': {
'seo': {
'metaTitle': page['seo_title'],
'metaDescription': page['seo_description'],
'canonicalURL': page['canonical'],
'metaRobots': 'noindex' if page['robots'] == '1' else 'index,follow',
'openGraph': {
'title': page['og_title'] or page['seo_title'],
'description': page['og_description'] or page['seo_description'],
}
}
}
})
Building 301 Redirect Map
import csv
def build_redirect_map(old_urls, new_urls, url_mapping_csv):
    """Write a 301 redirect map (old URL -> new URL) to a CSV file.

    The two URL lists are paired positionally; pairs whose URL did not
    change are omitted, since no redirect is needed for them. The
    resulting CSV (columns: old_url, new_url, status_code) feeds the
    nginx/htaccess configuration step.
    """
    changed_pairs = [(old, new)
                     for old, new in zip(old_urls, new_urls)
                     if old != new]
    with open(url_mapping_csv, 'w', newline='') as handle:
        writer = csv.writer(handle)
        writer.writerow(['old_url', 'new_url', 'status_code'])
        writer.writerows([old, new, 301] for old, new in changed_pairs)
Nginx Redirect Configuration
def generate_nginx_redirects(redirect_map_csv):
    """Generate an nginx ``map $request_uri $redirect_uri`` block from a CSV.

    Args:
        redirect_map_csv: path to a CSV with ``old_url``/``new_url``
            columns (as produced by ``build_redirect_map``).

    Returns:
        str: the full nginx ``map`` block. Each old URL is emitted both
        without and with a trailing slash so either form redirects.
    """
    lines = ['map $request_uri $redirect_uri {', '    default "";']
    with open(redirect_map_csv) as f:
        for row in csv.DictReader(f):
            # BUG FIX: '/'.rstrip('/') yields '', which used to emit a
            # bogus empty-key entry for the site root; keep '/' intact.
            old = row['old_url'].rstrip('/') or '/'
            new = row['new_url']
            lines.append(f'    "{old}" "{new}";')
            if old != '/':
                # Trailing-slash variant of the same old URL.
                lines.append(f'    "{old}/" "{new}";')
    lines.append('}')
    return '\n'.join(lines)
SEO Validation After Migration
def validate_seo_migration(old_sitemap_urls, new_site_base,
                           old_site_base='https://old-site.com', timeout=10):
    """Check that every old sitemap URL still resolves on the new site.

    NOTE(review): relies on a module-level ``import requests`` that is not
    visible in this excerpt — confirm it exists at the top of the file.

    Args:
        old_sitemap_urls: absolute URLs collected from the old sitemap.
        new_site_base: base URL of the new site (no trailing slash).
        old_site_base: origin to strip from old URLs; defaults to the
            previously hard-coded 'https://old-site.com'.
        timeout: per-request timeout in seconds.

    Returns:
        list[str]: human-readable error descriptions (empty = all good).
    """
    errors = []
    for url in old_sitemap_urls:
        path = url.replace(old_site_base, '')
        response = requests.get(f"{new_site_base}{path}",
                                allow_redirects=True, timeout=timeout)
        # With allow_redirects=True, status_code is the FINAL status after
        # following the chain. BUG FIX: the original checked "'404' in
        # response.url", a substring test on the URL text, so an actual
        # redirect landing on a 404 page was never reported.
        if response.status_code == 404:
            if response.history:
                errors.append(f"Redirect to 404: {path} → {response.url}")
            else:
                errors.append(f"404: {path}")
    return errors
Execution Time
Exporting the SEO data, importing it into the new CMS, and generating the nginx redirects takes roughly 2–3 working days.







