You can even collect the scraped data in your favorite database. Just add a webhook URL, and the scraped data will be submitted to your target destination.
Where to add the Webhook URL?
Once you log in to your dashboard, click the Add Webhook URL button.
When you click that button, a box will appear asking you to paste your webhook URL.
You will also be asked to name this webhook. The name should be unique, as it will later be used to identify the target storage URL. The webhook URL itself should be an endpoint that accepts POST requests.
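Because the scraped data is delivered as the body of a POST request, your webhook endpoint only needs to accept that request and store the payload. Below is a minimal sketch of such a receiver in Python using Flask; the route path, port, and the assumption that the payload arrives as JSON are illustrative and not part of the Scrapingdog API.

# Minimal sketch of a webhook receiver, assuming the scraped data arrives
# as the body of a POST request. Route path, port, and JSON handling are
# illustrative assumptions, not Scrapingdog specifics.
from flask import Flask, request

app = Flask(__name__)

@app.route("/scrapingdog-webhook", methods=["POST"])
def receive_scraped_data():
    # Fall back to the raw body if the payload is not JSON
    payload = request.get_json(silent=True) or request.data.decode("utf-8")
    # Store the payload wherever you like (database, file, queue, ...)
    print(payload)
    return "", 200

if __name__ == "__main__":
    app.run(port=8000)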
cURL
curl "https://api.scrapingdog.com/webhook?api_key=5e5a97e5b1ca5b194f42da86&webhook_id=random_name&url=http://httpbin.org/ip&dynamic=false"
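In this request, api_key is your Scrapingdog API key, url is the page you want scraped, webhook_id is the unique name you gave the webhook above, and dynamic toggles JavaScript rendering for the target page.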
Python
import requests

url = "https://api.scrapingdog.com/webhook"
params = {
    "api_key": "5e5a97e5b1ca5b194f42da86",
    "url": "http://httpbin.org/ip",
    "dynamic": "false",
    "webhook_id": "random_name"
}

response = requests.get(url, params=params)
print(response.text)
Node.js
const axios = require('axios');

const apiUrl = 'https://api.scrapingdog.com/webhook';
const apiKey = '5e5a97e5b1ca5b194f42da86';
const targetUrl = 'http://httpbin.org/ip';
const dynamic = false;
const webhookName = 'random_name';

const params = {
  api_key: apiKey,
  url: targetUrl,
  dynamic: dynamic.toString(),
  webhook_id: webhookName
};

axios
  .get(apiUrl, { params })
  .then((response) => {
    if (response.status === 200) {
      console.log(response.data);
    } else {
      console.error(`Failed to retrieve data. Status code: ${response.status}`);
    }
  })
  .catch((error) => {
    console.error('An error occurred:', error.message);
  });
PHP
<?php
$apiUrl = 'https://api.scrapingdog.com/webhook';
$apiKey = '5e5a97e5b1ca5b194f42da86';
$targetUrl = 'http://httpbin.org/ip';
$dynamic = false;
$webhook_id = 'random_name';

$queryParams = [
    'api_key' => $apiKey,
    'url' => $targetUrl,
    'dynamic' => $dynamic ? 'true' : 'false',
    'webhook_id' => $webhook_id
];

$queryString = http_build_query($queryParams);
$fullUrl = $apiUrl . '?' . $queryString;

$curl = curl_init();
curl_setopt($curl, CURLOPT_URL, $fullUrl);
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);

$response = curl_exec($curl);

if ($response === false) {
    echo 'cURL error: ' . curl_error($curl);
} else {
    $httpCode = curl_getinfo($curl, CURLINFO_HTTP_CODE);
    if ($httpCode === 200) {
        echo $response;
    } else {
        echo 'Failed to retrieve data. Status code: ' . $httpCode;
    }
}

curl_close($curl);
?>
Ruby
require 'net/http'
require 'uri'

api_url = 'https://api.scrapingdog.com/webhook'
api_key = '5e5a97e5b1ca5b194f42da86'
target_url = 'http://httpbin.org/ip'
dynamic = false
webhook_id = 'random_name'

uri = URI.parse(api_url)
params = {
  'api_key' => api_key,
  'url' => target_url,
  'dynamic' => dynamic.to_s,
  'webhook_id' => webhook_id
}
uri.query = URI.encode_www_form(params)

http = Net::HTTP.new(uri.host, uri.port)
http.use_ssl = true

request = Net::HTTP::Get.new(uri.request_uri)
response = http.request(request)

if response.code == '200'
  puts response.body
else
  puts "Failed to retrieve data. Status code: #{response.code}"
end
Java
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;

public class ScrapingDogAPITest {
    public static void main(String[] args) {
        try {
            String apiUrl = "https://api.scrapingdog.com/webhook";
            String apiKey = "5e5a97e5b1ca5b194f42da86";
            String targetUrl = "http://httpbin.org/ip";
            String webhook_id = "random_name";
            boolean dynamic = false;

            // Construct the query parameters
            Map<String, String> params = new HashMap<>();
            params.put("api_key", apiKey);
            params.put("url", targetUrl);
            params.put("webhook_id", webhook_id);
            params.put("dynamic", String.valueOf(dynamic));

            // Build the query URL
            StringBuilder query = new StringBuilder(apiUrl);
            query.append("?");
            for (Map.Entry<String, String> entry : params.entrySet()) {
                query.append(entry.getKey()).append("=").append(entry.getValue()).append("&");
            }
            String queryUrl = query.toString().substring(0, query.length() - 1);

            // Create an HTTP connection and set up the request
            URL url = new URL(queryUrl);
            HttpURLConnection connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");

            // Get the response code
            int responseCode = connection.getResponseCode();
            if (responseCode == HttpURLConnection.HTTP_OK) {
                // Read and print the response
                BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()));
                String inputLine;
                StringBuilder response = new StringBuilder();
                while ((inputLine = in.readLine()) != null) {
                    response.append(inputLine);
                }
                in.close();
                System.out.println(response.toString());
            } else {
                System.out.println("Failed to retrieve data. Status code: " + responseCode);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
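Each of these requests returns a JSON response containing a sid, for example: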
{
  "sid": "4286cfd7-4752-4f6b-8cc6-df01a8ac3ce3"
}