Overview
Bulk Linkedin Finder allows you to extract email addresses from LinkedIn profile URLs at scale. Transform your LinkedIn prospecting efforts by converting profile links into actionable contact information for direct outreach.
Key Features
LinkedIn Integration : Process LinkedIn profile URLs to find email addresses
Bulk Processing : Handle up to 10,000 LinkedIn URLs per operation
Professional Context : Maintain connection between LinkedIn profiles and email contacts
Pattern Matching : Use LinkedIn data to improve email accuracy
Export Capabilities : Download results with LinkedIn profile associations
How LinkedIn Finder Bulk Works
Collect LinkedIn URLs : Gather LinkedIn profile URLs from your prospecting activities
Upload URL List : Provide LinkedIn URLs via text input or file upload
Process Profiles : Launch the email discovery process
Review Results : Examine found email addresses with LinkedIn context
Export Data : Download email addresses linked to LinkedIn profiles
Limitations
Each bulk operation is limited to 10,000 URLs
Additional rows will be skipped
Special or unexpected characters in the uploaded file may be removed during import
Invalid URLs won't be imported
Duplicate URLs won't be imported
Go SDK Integration
Installation
go get github.com/tomba-io/go
Basic Setup
package main
import (
" fmt "
" log "
" time "
" strings "
" github.com/tomba-io/go/tomba "
" github.com/tomba-io/go/tomba/models "
)
func main () {
// Initialize Tomba client
client := tomba. NewTomba ( "your-api-key" , "your-secret-key" )
// Your LinkedIn finder code here
}
Creating LinkedIn Finder Bulk with URL List
// LinkedIn URLs collected from prospecting.
linkedinURLs := []string{
	"https://www.linkedin.com/in/johndoe",
	"https://www.linkedin.com/in/janesmith",
	"https://www.linkedin.com/in/mikejohnson",
	"https://linkedin.com/in/sarahwilson",
	"https://www.linkedin.com/in/company-ceo",
}

// Join URLs with newlines — the bulk API expects one URL per line.
urlList := strings.Join(linkedinURLs, "\n")

params := &models.BulkCreateParams{
	Name:    "LinkedIn Prospects - Tech Industry Q1",
	List:    urlList,
	Sources: true,  // Include sources for context
	Verify:  false, // Skip verification for LinkedIn findings
	Notifie: true,  // Notify when complete
}

// Create the LinkedIn finder bulk.
response, err := client.CreateBulk(models.BulkTypeLinkedIn, params)
if err != nil {
	log.Fatal("Failed to create LinkedIn finder bulk:", err)
}

bulkID := *response.Data.ID
fmt.Printf("Created LinkedIn finder bulk with ID: %d\n", bulkID)
Creating LinkedIn Finder Bulk from CSV File
// Create a LinkedIn finder bulk from a CSV file containing URLs.
params := &models.BulkCreateParams{
	Name:      "Sales Prospects - LinkedIn Discovery",
	Delimiter: ",",
	Column:    1, // LinkedIn URL column (1-based index)
	Sources:   true,
	Notifie:   true,
}

// Upload the CSV file with LinkedIn URLs.
response, err := client.CreateBulkWithFile(
	models.BulkTypeLinkedIn,
	params,
	"/path/to/linkedin-profiles.csv",
)
if err != nil {
	log.Fatal("Failed to create bulk with file:", err)
}

bulkID := *response.Data.ID
fmt.Printf("Created LinkedIn finder bulk: %d\n", bulkID)
Single LinkedIn Profile Processing (for comparison)
// Process a single LinkedIn profile (for comparison with bulk mode).
linkedinURL := "https://www.linkedin.com/in/example-profile"

result, err := client.LinkedinFinder(linkedinURL)
if err != nil {
	log.Printf("Failed to find email for %s: %v", linkedinURL, err)
} else {
	fmt.Printf("LinkedIn Profile: %s\n", linkedinURL)
	// Email is a pointer: nil means no address was discovered.
	if result.Data.Email != nil {
		fmt.Printf("Found Email: %s\n", *result.Data.Email)
		fmt.Printf("Name: %s %s\n", result.Data.FirstName, result.Data.LastName)
		if result.Data.Position != nil {
			fmt.Printf("Position: %s\n", *result.Data.Position)
		}
	} else {
		fmt.Println("No email found for this profile")
	}
}
Launching and Monitoring LinkedIn Processing
// Launch the LinkedIn email discovery
launchResponse, err := client. LaunchBulk (models.BulkTypeLinkedIn, bulkID)
if err != nil {
log. Fatal ( "Failed to launch LinkedIn finder:" , err)
}
fmt. Println ( "LinkedIn email discovery started!" )
// Monitor progress with detailed logging
startTime := time. Now ()
ticker := time. NewTicker ( 30 * time.Second)
defer ticker. Stop ()
for {
progress, err := client. GetBulkProgress (models.BulkTypeLinkedIn, bulkID)
if err != nil {
log. Printf ( "Error checking progress: %v " , err)
time. Sleep ( 30 * time.Second)
continue
}
elapsed := time. Since (startTime). Round (time.Second)
fmt. Printf ( "LinkedIn processing: %d%% ( %d profiles processed) - %v elapsed \n " ,
progress.Progress,
progress.Processed,
elapsed,
)
if progress.Status {
fmt. Println ( "LinkedIn email discovery completed!" )
break
}
// LinkedIn processing can be slower, check every 30 seconds
time. Sleep ( 30 * time.Second)
}
Retrieving LinkedIn Discovery Results
// Get detailed results for the completed bulk.
bulk, err := client.GetBulk(models.BulkTypeLinkedIn, bulkID)
if err != nil {
	log.Fatal("Failed to get bulk details:", err)
}

bulkInfo := bulk.Data[0]
fmt.Printf("LinkedIn Discovery Results:\n")
fmt.Printf("- Campaign: %s\n", bulkInfo.Name)
fmt.Printf("- Status: %v\n", bulkInfo.Status)
fmt.Printf("- Profiles Processed: %d\n", bulkInfo.Processed)
if bulkInfo.TotalEmails != nil {
	fmt.Printf("- Emails Found: %d\n", *bulkInfo.TotalEmails)
	// Guard against division by zero when no profiles were processed.
	if bulkInfo.Processed > 0 {
		successRate := float64(*bulkInfo.TotalEmails) / float64(bulkInfo.Processed) * 100
		fmt.Printf("- Success Rate: %.1f%%\n", successRate)
	}
}

// Download all results with LinkedIn context.
err = client.SaveBulkResults(
	models.BulkTypeLinkedIn,
	bulkID,
	"linkedin-emails.csv",
	"full",
)
if err != nil {
	log.Fatal("Failed to download results:", err)
}
fmt.Println("LinkedIn results saved to linkedin-emails.csv")
Advanced Processing with URL Validation
// validateLinkedInURL reports whether url looks like a LinkedIn profile
// URL. Matching is case-insensitive and ignores surrounding whitespace;
// both the /in/ (personal) and /pub/ (public) path forms are accepted.
func validateLinkedInURL(url string) bool {
	url = strings.ToLower(strings.TrimSpace(url))
	return strings.Contains(url, "linkedin.com/in/") ||
		strings.Contains(url, "linkedin.com/pub/")
}
// processLinkedInProspects validates the given LinkedIn URLs, creates and
// launches a bulk finder operation, monitors it to completion (with a
// 60-minute timeout), and saves the results to a timestamped CSV file.
func processLinkedInProspects(client *tomba.Tomba, urls []string) error {
	// Step 1: Validate URLs, dropping anything that does not look like a
	// LinkedIn profile link.
	var validURLs []string
	for _, url := range urls {
		if validateLinkedInURL(url) {
			validURLs = append(validURLs, url)
		} else {
			log.Printf("Skipping invalid URL: %s", url)
		}
	}
	if len(validURLs) == 0 {
		return fmt.Errorf("no valid LinkedIn URLs found")
	}
	fmt.Printf("Processing %d valid LinkedIn URLs\n", len(validURLs))

	// Step 2: Create the bulk operation.
	params := &models.BulkCreateParams{
		Name:    fmt.Sprintf("LinkedIn Discovery - %s", time.Now().Format("2006-01-02")),
		List:    strings.Join(validURLs, "\n"),
		Sources: true,
		Notifie: true,
	}
	response, err := client.CreateBulk(models.BulkTypeLinkedIn, params)
	if err != nil {
		return fmt.Errorf("failed to create LinkedIn bulk: %w", err)
	}
	bulkID := *response.Data.ID
	log.Printf("Created LinkedIn bulk: %d", bulkID)

	// Step 3: Launch and monitor.
	_, err = client.LaunchBulk(models.BulkTypeLinkedIn, bulkID)
	if err != nil {
		return fmt.Errorf("failed to launch bulk: %w", err)
	}

	// Monitor with a hard timeout — LinkedIn processing can take longer
	// than other bulk types.
	timeout := time.After(60 * time.Minute)
	ticker := time.NewTicker(45 * time.Second)
	defer ticker.Stop()
	startTime := time.Now()

	for {
		select {
		case <-timeout:
			return fmt.Errorf("LinkedIn processing timed out after 60 minutes")
		case <-ticker.C:
			progress, err := client.GetBulkProgress(models.BulkTypeLinkedIn, bulkID)
			if err != nil {
				log.Printf("Error checking progress: %v", err)
				continue
			}
			// Log at quarter milestones and on completion to cut noise.
			if progress.Progress%25 == 0 || progress.Status {
				elapsed := time.Since(startTime).Round(time.Second)
				log.Printf("LinkedIn progress: %d%% (%d/%d) - %v",
					progress.Progress, progress.Processed, len(validURLs), elapsed)
			}
			if progress.Status {
				// Fetch and summarize the final results.
				bulk, err := client.GetBulk(models.BulkTypeLinkedIn, bulkID)
				if err != nil {
					return fmt.Errorf("failed to get final results: %w", err)
				}
				info := bulk.Data[0]
				log.Printf("LinkedIn discovery completed!")
				log.Printf("Total processed: %d", info.Processed)
				// Guard the success-rate division against Processed == 0.
				if info.TotalEmails != nil && info.Processed > 0 {
					successRate := float64(*info.TotalEmails) / float64(info.Processed) * 100
					log.Printf("Emails found: %d (%.1f%% success rate)", *info.TotalEmails, successRate)
				}

				// Save results to a timestamped CSV file.
				timestamp := time.Now().Format("20060102-150405")
				outputFile := fmt.Sprintf("linkedin-results-%s.csv", timestamp)
				err = client.SaveBulkResults(models.BulkTypeLinkedIn, bulkID, outputFile, "full")
				if err != nil {
					return fmt.Errorf("failed to save results: %w", err)
				}
				log.Printf("Results saved to: %s", outputFile)
				return nil
			}
		}
	}
}
Managing LinkedIn Finder Operations
// List all LinkedIn finder bulks, newest first.
params := &models.BulkGetParams{
	Page:      1,
	Limit:     10,
	Direction: "desc",
	Filter:    "all",
}

bulks, err := client.GetAllLinkedInBulks(params)
if err != nil {
	log.Fatal("Failed to get LinkedIn bulks:", err)
}

fmt.Printf("LinkedIn Finder Operations:\n")
for _, bulk := range bulks.Data {
	status := "In Progress"
	if bulk.Status {
		status = "Completed"
	}
	fmt.Printf("- %s (ID: %d)\n", bulk.Name, bulk.BulkID)
	fmt.Printf("  Status: %s | Progress: %d%% | Processed: %d\n",
		status, bulk.Progress, bulk.Processed)
	if bulk.TotalEmails != nil {
		fmt.Printf("  Emails Found: %d\n", *bulk.TotalEmails)
	}
	fmt.Println()
}

// Clean up completed operations older than 30 days.
for _, bulk := range bulks.Data {
	if bulk.Status && time.Since(bulk.CreatedAt) > 30*24*time.Hour {
		_, err := client.ArchiveBulk(models.BulkTypeLinkedIn, bulk.BulkID)
		if err != nil {
			log.Printf("Failed to archive bulk %d: %v", bulk.BulkID, err)
		} else {
			fmt.Printf("Archived old bulk: %s\n", bulk.Name)
		}
	}
}
Complete LinkedIn Prospecting Workflow
// LinkedInProspect holds one LinkedIn profile URL together with the
// contact details discovered for it by the bulk finder.
type LinkedInProspect struct {
	URL      string // LinkedIn profile URL that was processed
	Email    string // Discovered email address; empty when none was found
	Name     string // Contact name, when available
	Position string // Job title, when available
	Company  string // Company name, when available
	Found    bool   // True when an email address was found for this profile
}
// runLinkedInCampaign reads LinkedIn URLs from the CSV at csvPath
// (first column, header row skipped), validates them, and processes
// them in batches of 5,000 through the bulk finder. Failed batches are
// logged and skipped; results from successful batches are aggregated.
func runLinkedInCampaign(client *tomba.Tomba, csvPath string) ([]LinkedInProspect, error) {
	// Step 1: Read and validate LinkedIn URLs from the CSV file.
	file, err := os.Open(csvPath)
	if err != nil {
		return nil, fmt.Errorf("failed to open CSV: %w", err)
	}
	defer file.Close()

	reader := csv.NewReader(file)
	records, err := reader.ReadAll()
	if err != nil {
		return nil, fmt.Errorf("failed to read CSV: %w", err)
	}

	var validURLs []string
	for i, record := range records {
		if i == 0 { // Skip header row
			continue
		}
		if len(record) > 0 && validateLinkedInURL(record[0]) {
			validURLs = append(validURLs, record[0])
		}
	}
	log.Printf("Found %d valid LinkedIn URLs", len(validURLs))

	// Step 2: Process URLs in batches for better manageability.
	batchSize := 5000
	var allResults []LinkedInProspect

	for i := 0; i < len(validURLs); i += batchSize {
		end := i + batchSize
		if end > len(validURLs) {
			end = len(validURLs)
		}
		batchURLs := validURLs[i:end]

		log.Printf("Processing batch %d/%d (%d URLs)",
			i/batchSize+1, (len(validURLs)+batchSize-1)/batchSize, len(batchURLs))

		batchResults, err := processLinkedInBatch(client, batchURLs)
		if err != nil {
			// Best-effort: log and continue with the remaining batches.
			log.Printf("Batch processing failed: %v", err)
			continue
		}
		allResults = append(allResults, batchResults...)

		// Small delay between batches to be gentle on API quotas.
		if end < len(validURLs) {
			time.Sleep(5 * time.Second)
		}
	}
	return allResults, nil
}
// processLinkedInBatch creates, launches, and waits on a single bulk
// finder operation for urls, then downloads and parses the results.
// NOTE(review): the wait loop below has no timeout — callers that need
// bounded runtime should add one (see processLinkedInProspects).
func processLinkedInBatch(client *tomba.Tomba, urls []string) ([]LinkedInProspect, error) {
	// Create the bulk operation for this batch.
	params := &models.BulkCreateParams{
		Name:    fmt.Sprintf("LinkedIn Batch - %s", time.Now().Format("15:04:05")),
		List:    strings.Join(urls, "\n"),
		Sources: true,
		Notifie: false, // Don't notify for individual batches
	}
	response, err := client.CreateBulk(models.BulkTypeLinkedIn, params)
	if err != nil {
		return nil, err
	}
	bulkID := *response.Data.ID

	// Launch the batch.
	_, err = client.LaunchBulk(models.BulkTypeLinkedIn, bulkID)
	if err != nil {
		return nil, err
	}

	// Wait for completion, polling every 30 seconds.
	for {
		progress, err := client.GetBulkProgress(models.BulkTypeLinkedIn, bulkID)
		if err != nil {
			time.Sleep(30 * time.Second)
			continue
		}
		if progress.Status {
			break
		}
		time.Sleep(30 * time.Second)
	}

	// Download the raw results.
	data, err := client.DownloadBulk(models.BulkTypeLinkedIn, bulkID, &models.BulkDownloadParams{Type: "full"})
	if err != nil {
		return nil, err
	}

	// Parse CSV results.
	// NOTE(review): this naive comma split breaks on quoted fields that
	// contain commas — use encoding/csv for production parsing.
	var results []LinkedInProspect
	lines := strings.Split(string(data), "\n")
	for _, line := range lines[1:] { // Skip header row
		if strings.TrimSpace(line) == "" {
			continue
		}
		parts := strings.Split(line, ",")
		if len(parts) >= 2 {
			results = append(results, LinkedInProspect{
				URL:   parts[0],
				Email: parts[1],
				Found: parts[1] != "",
			})
		}
	}
	return results, nil
}
// Usage example
func main() {
	client := tomba.NewTomba("your-api-key", "your-secret-key")

	results, err := runLinkedInCampaign(client, "linkedin-prospects.csv")
	if err != nil {
		log.Fatal("LinkedIn campaign failed:", err)
	}

	// Analyze results.
	totalProspects := len(results)
	emailsFound := 0
	for _, prospect := range results {
		if prospect.Found {
			emailsFound++
		}
	}

	fmt.Printf("\nLinkedIn Campaign Summary:\n")
	fmt.Printf("Total Prospects: %d\n", totalProspects)
	fmt.Printf("Emails Found: %d\n", emailsFound)
	// Avoid a division by zero (NaN output) when no prospects were found.
	if totalProspects > 0 {
		fmt.Printf("Success Rate: %.1f%%\n", float64(emailsFound)/float64(totalProspects)*100)
	}
}
CSV File Format Examples
Simple LinkedIn URL List
linkedin_url
https://www.linkedin.com/in/johndoe
https://www.linkedin.com/in/janesmith
https://linkedin.com/in/mikejohnson
https://www.linkedin.com/in/sarahconnor
With Additional Context
linkedin_url,name,company,notes
https://www.linkedin.com/in/johndoe,John Doe,Tech Corp,CEO prospect
https://www.linkedin.com/in/janesmith,Jane Smith,StartupXYZ,CTO contact
https://linkedin.com/in/mikejohnson,Mike Johnson,Enterprise Co,Decision maker
Best Practices
Valid LinkedIn URLs : Ensure URLs are properly formatted and accessible
Profile Quality : Use complete, professional LinkedIn profiles for better results
Compliance : Respect LinkedIn's terms of service and privacy policies
Data Integration : Connect LinkedIn insights with email outreach strategies
Regular Updates : Refresh data as LinkedIn profiles change
Batch Processing : Process large lists in manageable batches
Rate Limiting : Be mindful of processing limits and API quotas
Result Analysis : Track success rates to optimize prospecting strategies
Privacy Considerations : Ensure compliance with data privacy regulations
Quality Control : Validate found emails before using for outreach
Last modified on October 10, 2025