If we pass a function as a node, the parser raises the following error:
Cannot read/open/parse file large_disk/method_name_data/code2vec/java-large/validation/ShifuML__shifu/src/main/java/ml/shifu/shifu/core/dtrain/dt/BinaryDTSerializer.java 'NoneType' object has no attribute 'type'
Traceback (most recent call last):
File "/hdd/codesumm/data/main.py", line 295, in run_file_procedure
method_metrics = collect_method_metrics(found_method, source_code_bytes, method_name_in_csv)
File "/hdd/codesumm/data/main.py", line 224, in collect_method_metrics
adg_method = parse_from_ast(found_method, source_code_bytes)
File "/hdd/prog_graph3.10/program-graphs/program_graphs/adg/parser/java/parser.py", line 39, in parse_from_ast
mk_adg(ast, adg, parent_adg_node=None, source=source_code_bytes)
File "/hdd/prog_graph3.10/program-graphs/program_graphs/adg/parser/java/parser.py", line 56, in mk_adg
if node.type == 'program':
AttributeError: 'NoneType' object has no attribute 'type'
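A minimal reproduction sketch of what I mean (the sample class, the shared-library path, and the node-extraction steps are placeholders for illustration; only parse_from_ast, its module path, and the 'program' check in mk_adg are taken from the traceback above):

from tree_sitter import Language, Parser
from program_graphs.adg.parser.java.parser import parse_from_ast

# Placeholder grammar path; assumes the Java grammar was built into a tree-sitter shared library.
JAVA_LANGUAGE = Language('build/java-languages.so', 'java')
parser = Parser()
parser.set_language(JAVA_LANGUAGE)

source_code_bytes = b"class A { void m() { int x = 0; } }"
tree = parser.parse(source_code_bytes)

# Whole file: the root node has type 'program', which mk_adg checks for.
adg = parse_from_ast(tree.root_node, source_code_bytes)

# Single function: pick the method_declaration node out of the class body
# and pass it instead of the 'program' root.
class_body = tree.root_node.children[0].child_by_field_name('body')
method_node = next(c for c in class_body.children if c.type == 'method_declaration')
adg = parse_from_ast(method_node, source_code_bytes)  # fails as in the traceback above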
The run_cmd function (runCmd in the source):
public static long runCmd(ArrayList<byte[]> hashes, String server,
        int port, String password, boolean useSSL) throws Exception,
        ClassNotFoundException, HashtableFullException {
    Exception he = null;
    for (int t = 0; t < 10; t++) {
        InputStream in = null;
        PostMethod method = null;
        try {
            SDFSLogger.getLog().debug("getting hashes [" + hashes.size() + "]");
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            ObjectOutputStream obj_out = new ObjectOutputStream(bos);
            obj_out.writeObject(hashes);
            String file = com.google.common.io.BaseEncoding.base64Url().encode(
                    CompressionUtils.compressSnappy(bos.toByteArray()));
            StringBuilder sb = new StringBuilder();
            Formatter formatter = new Formatter(sb);
            formatter.format("file=%s&cmd=batchgetblocks&options=ilovemg",
                    "ninja");
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            try {
                // String hmac = MgmtServerConnection.getAuth(password);
                String url = MgmtServerConnection.createAuthUrl(sb.toString(), password);
                method = MgmtServerConnection.connectAndPost(server, port,
                        url, "", file, useSSL);
                in = method.getResponseBodyAsStream();
                SDFSLogger.getLog().debug("reading imported blocks");
                IOUtils.copy(in, out);
                formatter.close();
            } finally {
                if (method != null)
                    try {
                        method.releaseConnection();
                    } catch (Exception e) {
                    }
            }
            byte[] sh = CompressionUtils.decompressSnappy(out.toByteArray());
            ObjectInputStream obj_in = new ObjectInputStream(
                    new ByteArrayInputStream(sh));
            @SuppressWarnings("unchecked")
            List<HashChunk> hck = (List<HashChunk>) obj_in.readObject();
            obj_in.close();
            if (hck.size() != hashes.size())
                throw new IOException("unable to import all blocks requested ["
                        + hashes.size() + "] and received [" + hck.size() + "]");
            AtomicLong imsz = new AtomicLong();
            ArrayList<DataWriter> th = new ArrayList<DataWriter>();
            for (int i = 0; i < hck.size(); i++) {
                DataWriter dw = new DataWriter();
                dw._hc = hck.get(i);
                dw.imsz = imsz;
                th.add(dw);
                executor.execute(dw);
            }
            Exception e1 = null;
            for (;;) {
                int nd = 0;
                for (DataWriter dw : th) {
                    if (dw.done) {
                        if (dw.e1 != null) {
                            e1 = dw.e1;
                        }
                    } else {
                        nd++;
                    }
                }
                if (nd == 0)
                    break;
                Thread.sleep(10);
            }
            if (e1 != null)
                throw e1;
            SDFSLogger.getLog().debug("imported " + hck.size());
            return imsz.get();
        } catch (Exception e) {
            he = e;
            Thread.sleep(1000);
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (Exception e) {
                }
            }
            if (method != null)
                try {
                    method.releaseConnection();
                } catch (Exception e) {
                }
        }
    }
    throw new IOException(he);
}