Hi Guys
I am writing a YouTube data extraction plugin for a client of mine, and
I've been developing locally on my Mac without any issues: the helper
class I have written works absolutely fine. I also checked an Ubuntu VM
and it's fine there too.
I then deployed it today to my client's RHEL-based server and it's
completely broken. Logging in to YouTube CMS is admittedly a workaround,
but it's a workaround that works everywhere except RHEL. On RHEL the login
seems to work — I am redirected to the user's management page — but when I
then do an HTTP GET to fetch the file I require, Google seems to have
forgotten I'm logged in, and instead of sending me a zip file I get a
lovely HTML login page.
I've attached the methods in question, but I'm at a loss as to what I
can do to resolve this. Any ideas? I'll happily supply more information
on request, because today is supposedly handover day and this doesn't work :)
Cheers
Tom
/**
 * Logs in to the Google account used for the YouTube CMS download.
 *
 * Flow: GET the auth page (which sets the GALX anti-forgery cookie), POST the
 * credentials to ServiceLoginAuth, drain the 302 redirect body, then touch the
 * account-management page to finish establishing the session.
 *
 * @return true when every step succeeded
 * @throws KettleException on any HTTP/IO failure, unexpected status code, or
 *         when the GALX cookie is missing
 */
public boolean loginService() throws KettleException {
    // Reads the full response body as raw bytes; null for an empty entity.
    ResponseHandler<byte[]> handler = new ResponseHandler<byte[]>() {
        @Override
        public byte[] handleResponse(HttpResponse response) throws IOException {
            HttpEntity entity = response.getEntity();
            return entity != null ? EntityUtils.toByteArray(entity) : null;
        }
    };

    // Use the lenient cookie policy for the WHOLE login conversation, not just
    // the final GET (as the original did). Google sets cookies with attributes
    // that strict policies reject; if the default policy on this JVM is strict,
    // session cookies are silently dropped during login and later requests get
    // an HTML login page instead of the report. This is the most likely cause
    // of the RHEL-only failure. NOTE(review): worth confirming the default
    // cookie policy differs between the Mac/Ubuntu and RHEL JVMs.
    client.getParams().setParameter(ClientPNames.COOKIE_POLICY,
            CookiePolicy.BROWSER_COMPATIBILITY);

    // Step 1: hit the auth URL so Google sets the GALX cookie.
    httpGet = new HttpGet(serviceAuthURL);
    try {
        client.execute(httpGet, handler);
    } catch (IOException e) { // ClientProtocolException is an IOException
        throw new KettleException(e);
    }

    // Find GALX by NAME. The original used cookies.get(0), which depends on
    // cookie ordering and throws IndexOutOfBoundsException on an empty store.
    String galxValue = null;
    for (Cookie cookie : cookieStore.getCookies()) {
        if ("GALX".equalsIgnoreCase(cookie.getName())) {
            galxValue = cookie.getValue();
            break;
        }
    }
    if (galxValue == null) {
        throw new KettleException(
                "GALX cookie not found after requesting " + serviceAuthURL);
    }

    // Step 2: POST the login form.
    HttpPost httpPost = new HttpPost(serviceLoginAuthURL);
    List<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>(5);
    nameValuePairs.add(new BasicNameValuePair("dsh", ""));
    nameValuePairs.add(new BasicNameValuePair("GALX", galxValue));
    nameValuePairs.add(new BasicNameValuePair("Email", email));
    nameValuePairs.add(new BasicNameValuePair("Passwd", password));
    nameValuePairs.add(new BasicNameValuePair("service", "cds"));
    try {
        httpPost.setEntity(new UrlEncodedFormEntity(nameValuePairs));
    } catch (UnsupportedEncodingException e) {
        throw new KettleException(e);
    }

    HttpResponse loginResponse;
    try {
        loginResponse = client.execute(httpPost);
    } catch (IOException e) {
        throw new KettleException(e);
    }
    // A successful credential POST answers with a 302 redirect.
    if (loginResponse.getStatusLine().getStatusCode() != 302) {
        throw new KettleException("Failed : HTTP error code : "
                + loginResponse.getStatusLine().getStatusCode());
    }

    // Step 3: drain the redirect body so the connection can be reused, and
    // close the reader (the original leaked it and buffered the body into an
    // unused StringBuilder).
    BufferedReader br = null;
    try {
        br = new BufferedReader(
                new InputStreamReader(loginResponse.getEntity().getContent()));
        while (br.readLine() != null) {
            // discard — body content is not needed, only the cookies matter
        }
    } catch (IllegalStateException e) {
        throw new KettleException(e);
    } catch (IOException e) {
        throw new KettleException(e);
    } finally {
        if (br != null) {
            try {
                br.close();
            } catch (IOException ignored) {
                // best effort; the body was already fully read
            }
        }
    }

    // Step 4: touch the account-management page to complete the session.
    // The original caught exceptions here with printStackTrace() and returned
    // true anyway, reporting success on a broken session — now fatal.
    httpGet = new HttpGet("https://www.google.com/accounts/b/0/ManageAccount");
    try {
        client.execute(httpGet, handler);
    } catch (IOException e) {
        throw new KettleException(e);
    }
    return true;
}
public void getData() throws KettleException{
httpGet = new HttpGet(
authURL + key);
HttpResponse resp = null;
try {
resp = client.execute(httpGet);
} catch (ClientProtocolException e1) {
throw new KettleException(e1);
} catch (IOException e1) {
throw new KettleException(e1);
}
if (resp.getStatusLine().getStatusCode() != 200) {
throw new KettleException("Failed : HTTP error code : " +
resp.getStatusLine().getStatusCode());
}
BufferedReader bufReader = null;
try {
bufReader = new BufferedReader(new
InputStreamReader(resp.getEntity().getContent()));
} catch (IllegalStateException e1) {
throw new KettleException(e1);
} catch (IOException e1) {
throw new KettleException(e1);
}
try {
while ((bufReader.readLine()) != null) {
}
} catch (IOException e1) {
throw new KettleException(e1);
}
String extra = "";
if (report.equals(Type.DEMOGRAPHICS)) {
extra = "&met=VIEWER_PROMILLE";
}
/*
*If you are using the non CMS version of the plugin, then get the
insight data from https://www.youtube.com/insight_proxy_csv.
*/
if (entry != null && entry.equals("Standard")) {
httpGet = new HttpGet(proxyURL+"?dim=" + ReportEnum.getDim(report,
subtype)
+ extra + "&ord=" + ReportEnum.getOrd(report, subtype) +
"&from=" + dateSwitch(datefrom, "ddMMyyyy") + "&to=" + dateSwitch(dateto,
"ddMMyyyy")
+ "&where=" + GeographyEnum.getWhere(where) + "&whatType="
+ SearchEnum.getType(searchType, entry)
+ "&whatId=" + id);
} else if (entry != null && entry.equals("Enterprise") &&
report.equals(Type.PERFORMANCE_REPORT_MONTHLY)) {
httpGet = new HttpGet(
sandboxURL+""+dateSwitch(datefrom,
"yyyyMMdd")+"/YouTube_"+id+"_M_"+dateSwitch(datefrom,
"yyyyMMdd")+"_"+dateSwitch(dateto,
"yyyyMMdd")+"_report.csv."+FileTypeEnum.getExtension(fileType));
weekormonth = "M";
} else if (entry != null && entry.equals("Enterprise") &&
report.equals(Type.PERFORMANCE_REPORT_WEEKLY)) {
httpGet = new HttpGet(
sandboxURL+""+dateSwitch(datefrom,
"yyyyMMdd")+"/YouTube_"+id+"_W_"+dateSwitch(datefrom,
"yyyyMMdd")+"_"+dateSwitch(dateto,"yyyyMMdd")+"_report.csv."+FileTypeEnum.getExtension(fileType));
weekormonth = "W";
} else if (entry != null && entry.equals("Enterprise") &&
report.equals(Type.PERFORMANCE_REPORT_MONTHLY_RAW)) {
httpGet = new HttpGet(
sandboxURL+""+datefrom+"/YouTube_"+id+"_M_"+dateSwitch(datefrom,
"yyyyMMdd")+"_"+dateSwitch(dateto,
"yyyyMMdd")+"_rawdata.csv."+FileTypeEnum.getExtension(fileType));
weekormonth = "M";
} else if (entry != null && entry.equals("Enterprise") &&
report.equals(Type.PERFORMANCE_REPORT_WEEKLY_RAW)) {
httpGet = new HttpGet(
sandboxURL+""+datefrom+"/YouTube_"+id+"_W_"+dateSwitch(datefrom,
"yyyyMMdd")+"_"+dateSwitch(dateto,
"yyyyMMdd")+"_rawdata.csv."+FileTypeEnum.getExtension(fileType));
weekormonth = "W";
} else {
httpGet = new HttpGet(proxyURL+"?dim=" + ReportEnum.getDim(report,
subtype)
+ extra + "&ord=" + ReportEnum.getOrd(report, subtype) +
"&from=" + dateSwitch(datefrom, "ddMMyyyy") + "&to=" + dateSwitch(dateto,
"ddMMyyyy")
+ "&where=" + GeographyEnum.getWhere(where) + "&whatType="
+ SearchEnum.getType(searchType, entry)
+ "&whatId=" + id);
}
//HttpResponse resp = null;
try {
// client.getParams().setParameter(ClientPNames.COOKIE_POLICY,
CookiePolicy.BROWSER_COMPATIBILITY);
System.out.println(httpGet.getURI().toString());
resp = client.execute(httpGet);
} catch (ClientProtocolException e1) {
throw new KettleException(e1);
} catch (IOException e1) {
throw new KettleException(e1);
}
if (resp.getStatusLine().getStatusCode() != 200) {
throw new KettleException("Failed : HTTP error code : " +
resp.getStatusLine().getStatusCode());
}
if
(report.equals(Type.PERFORMANCE_REPORT_MONTHLY)||report.equals(Type.PERFORMANCE_REPORT_MONTHLY_RAW)||report.equals(Type.PERFORMANCE_REPORT_WEEKLY)||report.equals(Type.PERFORMANCE_REPORT_WEEKLY_RAW))
{
try {
if(fileType == FileTypeEnum.Type.ZIP){
InputStream content = resp.getEntity().getContent();
/** When I run this I see the HTML content on the RHEL box
or the compessed zip content on Mac**/
InputStreamReader is2 = new InputStreamReader(content);
BufferedReader br = new BufferedReader(is2);
String read = br.readLine();
while(read != null) {
System.out.println(read);
read = br.readLine();
}
ZipInputStream zipInputStream = null;
try {
zipInputStream = new ZipInputStream((InputStream)
content);
} catch (IllegalStateException e) {
throw new KettleException(e);
}/* catch (IOException e) {
throw new KettleException(e);
}*/
ZipEntry entry ;
final int BUFFER = 2048;
BufferedOutputStream dest = null;
if ((entry = zipInputStream.getNextEntry()) != null) {
if(outputPath==null || outputPath.equals("")){
rd2 = new InputStreamReader(zipInputStream);
//BufferedReader br3 = new BufferedReader(rd2);
csv = new CSVReader(rd2, DEFAULT_SEPARATOR,
DEFAULT_QUOTE_CHARACTER);
}
else {
int count;
byte data[] = new byte[BUFFER];
// write the files to the disk
FileOutputStream fos = new
FileOutputStream(outputPath+entry.getName());
dest = new BufferedOutputStream(fos, BUFFER);
while ((count = zipInputStream.read(data, 0, BUFFER))
!= -1) {
dest.write(data, 0, count);
}
dest.flush();
dest.close();
InputStream is = new
FileInputStream(outputPath+entry.getName());
rd2 = new InputStreamReader(is);
csv = new CSVReader(rd2, DEFAULT_SEPARATOR,
DEFAULT_QUOTE_CHARACTER);
}
}
}
else if(fileType == FileTypeEnum.Type.GZIP){
InputStream stream = new
GZIPInputStream(resp.getEntity().getContent());
if(outputPath==null || outputPath.equals("")){
rd2 = new InputStreamReader(stream);
csv = new CSVReader(rd2, DEFAULT_SEPARATOR,
DEFAULT_QUOTE_CHARACTER);
}
else{
String outp =
outputPath+"YouTube_"+id+"_"+weekormonth+"_"+dateSwitch(datefrom,
"yyyyMMdd")+"_"+dateSwitch(dateto, "yyyyMMdd")+"_report.csv";
FileOutputStream out = new FileOutputStream(outp);
// Transfer bytes from the compressed file to the
output file
byte[] buf = new byte[1024];
int len;
while ((len = stream.read(buf)) > 0) {
out.write(buf, 0, len);
}
// Close the file and stream
stream.close();
out.close();
InputStream is = new FileInputStream(outp);
rd2 = new InputStreamReader(is);
csv = new CSVReader(rd2, DEFAULT_SEPARATOR,
DEFAULT_QUOTE_CHARACTER);
}
}
} catch (IOException e) {
throw new KettleException(e);
}
} else {
try {
rd2 = new BufferedReader(new
InputStreamReader(resp.getEntity().getContent()));
} catch (IllegalStateException e) {
throw new KettleException(e);
} catch (IOException e) {
throw new KettleException(e);
}
csv = new CSVReader(rd2, DEFAULT_SEPARATOR,
DEFAULT_QUOTE_CHARACTER, 0);
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]