Sessions
When you want to use the same proxy for multiple requests, simply pass the session_number
parameter. The value of the session can be any integer; sending a new integer creates a new session, which lets you keep using the same proxy for every request made with that session number. Sessions expire 60 seconds after their last use.
There is no extra cost for using this feature.
API Example
curl "https://api.scrapingdog.com/scrape?api_key=5e3a0e5a97e5b1ca5b194f42da86&url=http://httpbin.org/ip&session_number=666"
import requests
url = "https://api.scrapingdog.com/scrape"
api_key = "5e3a0e5a97e5b1ca5b194f42da86"
target_url = "http://httpbin.org/ip"
session_number = "666"
params = {
"api_key": api_key,
"url": target_url,
"session_number": session_number
}
response = requests.get(url, params=params)
if response.status_code == 200:
print(response.text)
else:
print(f"Request failed with status code {response.status_code}")
const axios = require('axios');
const apiUrl = 'https://api.scrapingdog.com/scrape';
const apiKey = '5e3a0e5a97e5b1ca5b194f42da86';
const targetUrl = 'http://httpbin.org/ip';
const sessionNumber = '666';
const params = {
api_key: apiKey,
url: targetUrl,
session_number: sessionNumber,
};
axios
.get(apiUrl, { params })
.then((response) => {
console.log(response.data);
})
.catch((error) => {
console.error(`Request failed with status code ${error.response.status}`);
});
<?php
$apiUrl = 'https://api.scrapingdog.com/scrape';
$apiKey = '5e3a0e5a97e5b1ca5b194f42da86';
$targetUrl = 'http://httpbin.org/ip';
$sessionNumber = '666';
$queryParams = http_build_query([
'api_key' => $apiKey,
'url' => $targetUrl,
'session_number' => $sessionNumber,
]);
$fullUrl = $apiUrl . '?' . $queryParams;
$ch = curl_init();
curl_setopt($ch, CURLOPT_URL, $fullUrl);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
$response = curl_exec($ch);
if ($response === false) {
echo 'cURL Error: ' . curl_error($ch);
} else {
echo $response;
}
curl_close($ch);
require 'net/http'
api_url = 'https://api.scrapingdog.com/scrape'
api_key = '5e3a0e5a97e5b1ca5b194f42da86'
target_url = 'http://httpbin.org/ip'
session_number = '666'
query_params = URI.encode_www_form(api_key: api_key, url: target_url, session_number: session_number)
full_url = "#{api_url}?#{query_params}"
uri = URI(full_url)
response = Net::HTTP.get_response(uri)
if response.is_a?(Net::HTTPSuccess)
puts response.body
else
puts "HTTP Request Failed: #{response.code} - #{response.message}"
end
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
public class CurlToJava {
public static void main(String[] args) {
try {
String apiURL = "https://api.scrapingdog.com/scrape";
String apiKey = "5e3a0e5a97e5b1ca5b194f42da86";
String targetURL = "http://httpbin.org/ip";
String sessionNumber = "666";
String query = String.format("api_key=%s&url=%s&session_number=%s",
apiKey, targetURL, sessionNumber);
URL url = new URL(apiURL + "?" + query);
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setRequestMethod("GET");
int responseCode = connection.getResponseCode();
if (responseCode == HttpURLConnection.HTTP_OK) {
BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()));
String inputLine;
StringBuilder response = new StringBuilder();
while ((inputLine = in.readLine()) != null) {
response.append(inputLine);
}
in.close();
System.out.println(response.toString());
} else {
System.out.println("HTTP Request Failed: " + responseCode);
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
Last updated