If you want to pass your own custom headers while scraping a website, you can do so with the custom_headers=true parameter. This parameter forwards your headers to the request constructor, which is helpful when the site you are scraping requires login cookies or similar authentication data. There is no extra cost for using this parameter.
Our API already manages headers on its own and is designed to maximize the success rate of scraping any website. Use this feature only when the target page is behind an authentication wall or when you expect specific values in the response headers.
The same request is shown below in cURL, Python, Node.js, PHP, Ruby, and Java.
cURL

curl --header "X-customheader: bar" \
  "https://api.scrapingdog.com/scrape?api_key=5e5a97e5b1ca5b194f42da86fr444356&url=http://httpbin.org/anything&custom_headers=true"
Python

import requests

url = "https://api.scrapingdog.com/scrape"
api_key = "5e5a97e5b1ca5b194f42da86fr444356"

headers = {
    "X-customheader": "bar"
}

params = {
    "api_key": api_key,
    "url": "http://httpbin.org/anything",
    "custom_headers": "true"
}

response = requests.get(url, headers=headers, params=params)

if response.status_code == 200:
    print(response.text)
else:
    print(f"Request failed with status code: {response.status_code}")
Node.js

const axios = require('axios');

const url = 'https://api.scrapingdog.com/scrape';
const api_key = '5e5a97e5b1ca5b194f42da86fr444356';
const custom_headers = 'true';

const headers = {
  'X-customheader': 'bar',
};

const params = {
  api_key: api_key,
  url: 'http://httpbin.org/anything',
  custom_headers: custom_headers,
};

axios
  .get(url, { headers, params })
  .then((response) => {
    if (response.status === 200) {
      console.log(response.data);
    } else {
      console.log(`Request failed with status code: ${response.status}`);
    }
  })
  .catch((error) => {
    console.error('Error:', error);
  });
PHP

<?php
$api_key = '5e5a97e5b1ca5b194f42da86fr444356';
$url = 'http://httpbin.org/anything';
$custom_headers = 'true';                    // query parameter for the API
$request_headers = ['X-customheader: bar'];  // headers forwarded to the target site

$ch = curl_init();
curl_setopt($ch, CURLOPT_URL, "https://api.scrapingdog.com/scrape?api_key=$api_key&url=$url&custom_headers=$custom_headers");
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_HTTPHEADER, $request_headers);

$response = curl_exec($ch);

if ($response === false) {
    echo 'cURL error: ' . curl_error($ch);
} else {
    echo $response;
}

curl_close($ch);
?>
Ruby

require 'net/http'

url = URI.parse("https://api.scrapingdog.com/scrape?api_key=5e5a97e5b1ca5b194f42da86fr444356&url=http://httpbin.org/anything&custom_headers=true")

request = Net::HTTP::Get.new(url)
request['X-customheader'] = 'bar'

response = Net::HTTP.start(url.host, url.port, use_ssl: true) do |http|
  http.request(request)
end

puts response.body
Java

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class CurlToJava {
    public static void main(String[] args) {
        try {
            String url = "https://api.scrapingdog.com/scrape?api_key=5e5a97e5b1ca5b194f42da86fr444356&url=http://httpbin.org/anything&custom_headers=true";
            URL obj = new URL(url);
            HttpURLConnection connection = (HttpURLConnection) obj.openConnection();

            // Set custom header to forward to the target site
            connection.setRequestProperty("X-customheader", "bar");

            int responseCode = connection.getResponseCode();
            System.out.println("Response Code: " + responseCode);

            BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()));
            String inputLine;
            StringBuilder response = new StringBuilder();

            while ((inputLine = in.readLine()) != null) {
                response.append(inputLine);
            }
            in.close();

            System.out.println(response.toString());
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
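As a concrete use case, you can forward a session cookie when the page you want to scrape sits behind a login. The snippet below is a minimal Python sketch of that pattern; the target URL and the cookie name and value are placeholders that you would replace with the real session cookie from your logged-in browser session.

Python

import requests

api_key = "5e5a97e5b1ca5b194f42da86fr444356"

# Placeholder session cookie; replace with the cookie of your logged-in session
headers = {
    "Cookie": "sessionid=YOUR_SESSION_COOKIE"
}

params = {
    "api_key": api_key,
    "url": "https://example.com/account/orders",  # placeholder page behind a login
    "custom_headers": "true"
}

response = requests.get("https://api.scrapingdog.com/scrape", headers=headers, params=params)
print(response.status_code)
print(response.text[:500])  # preview the beginning of the returned HTML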