Premium Residential Proxies
You can use this feature to extract data from hard-to-scrape websites. You can access this feature by passing premium=true
. By default, this feature uses USA residential proxies. To use a different country, refer to the Geotargeting section.
Using this feature costs 10 credits per request, or 25 credits when combined with JS rendering.
API Example
curl "https://api.scrapingdog.com/scrape?api_key=5e5a97e5b1ca5b194f42da86d646&url=http://httpbin.org/ip&premium=true"
import requests
url = "https://api.scrapingdog.com/scrape"
api_key = "5e5a97e5b1ca5b194f42da86d646"
target_url = "http://httpbin.org/ip"
premium = "true"
params = {
"url": target_url,
"api_key": api_key, # Pass the API key as a parameter
"premium": premium
}
response = requests.get(url, params=params)
if response.status_code == 200:
print(response.text)
else:
print(f"Request failed with status code: {response.status_code}")
const axios = require('axios');
const apiUrl = "https://api.scrapingdog.com/scrape";
const apiKey = "5e5a97e5b1ca5b194f42da86d646";
const targetUrl = "http://httpbin.org/ip";
const premium = "true";
axios.get(apiUrl, {
params: {
url: targetUrl,
api_key: apiKey, // Pass the API key as a parameter
premium: premium
}
})
.then(response => {
if (response.status === 200) {
console.log(response.data);
} else {
console.log(`Request failed with status code: ${response.status}`);
}
})
.catch(error => {
console.error("An error occurred:", error);
});
<?php
$apiUrl = "https://api.scrapingdog.com/scrape";
$apiKey = "5e5a97e5b1ca5b194f42da86d646";
$targetUrl = "http://httpbin.org/ip";
$premium = "true";
// Initialize cURL session
$ch = curl_init();
// Set cURL options
curl_setopt($ch, CURLOPT_URL, $apiUrl . "?url=" . $targetUrl . "&api_key=" . $apiKey . "&premium=" . $premium);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
// Execute cURL session and get the response
$response = curl_exec($ch);
// Check if the cURL request was successful
if ($response === false) {
echo "cURL Error: " . curl_error($ch);
} else {
// Handle the response
echo $response;
}
// Close the cURL session
curl_close($ch);
?>
require 'net/http'
require 'uri'
api_url = 'https://api.scrapingdog.com/scrape'
api_key = '5e5a97e5b1ca5b194f42da86d646'
target_url = 'http://httpbin.org/ip'
premium = 'true'
url = URI.parse("#{api_url}?api_key=#{api_key}&url=#{target_url}&premium=#{premium}")
http = Net::HTTP.new(url.host, url.port)
http.use_ssl = (url.scheme == 'https')
request = Net::HTTP::Get.new(url)
response = http.request(request)
if response.code.to_i == 200
puts response.body
else
puts "HTTP Request Failed with code #{response.code}"
end
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
public class Main {
public static void main(String[] args) {
try {
String apiUrl = "https://api.scrapingdog.com/scrape";
String apiKey = "5e5a97e5b1ca5b194f42da86d646";
String targetUrl = "http://httpbin.org/ip";
String premium = "true";
String url = apiUrl + "?api_key=" + apiKey + "&url=" + targetUrl + "&premium=" + premium;
URL obj = new URL(url);
HttpURLConnection con = (HttpURLConnection) obj.openConnection();
con.setRequestMethod("GET");
int responseCode = con.getResponseCode();
if (responseCode == 200) {
BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()));
String inputLine;
StringBuffer response = new StringBuffer();
while ((inputLine = in.readLine()) != null) {
response.append(inputLine);
}
in.close();
System.out.println(response.toString());
} else {
System.out.println("HTTP Request Failed with code " + responseCode);
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
Last updated