repo stringclasses 1k
values | file_url stringlengths 96 373 | file_path stringlengths 11 294 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 6
values | commit_sha stringclasses 1k
values | retrieved_at stringdate 2026-01-04 14:45:56 2026-01-04 18:30:23 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/helium/ApplicationContext.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/helium/ApplicationContext.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.helium;
import org.apache.zeppelin.display.AngularObjectRegistry;
import org.apache.zeppelin.interpreter.InterpreterOutput;
/**
* ApplicationContext
*/
public class ApplicationContext {
  private final String noteId;
  private final String paragraphId;
  private final String applicationInstanceId;
  private final HeliumAppAngularObjectRegistry angularObjectRegistry;

  /** Output channel the application writes to in order to render on the notebook. */
  public final InterpreterOutput out;

  /**
   * Creates an immutable context describing where an application instance runs
   * and which front-end facilities it may use.
   *
   * @param noteId note the application is attached to
   * @param paragraphId paragraph the application is attached to
   * @param applicationInstanceId unique id of this application instance
   * @param angularObjectRegistry registry for angular objects scoped to this application
   * @param out output channel for rendering into the notebook
   */
  public ApplicationContext(String noteId,
                            String paragraphId,
                            String applicationInstanceId,
                            HeliumAppAngularObjectRegistry angularObjectRegistry,
                            InterpreterOutput out) {
    this.noteId = noteId;
    this.paragraphId = paragraphId;
    this.applicationInstanceId = applicationInstanceId;
    this.angularObjectRegistry = angularObjectRegistry;
    this.out = out;
  }

  /** @return unique id of this application instance */
  public String getApplicationInstanceId() {
    return applicationInstanceId;
  }

  /** @return id of the note this application is attached to */
  public String getNoteId() {
    return noteId;
  }

  /** @return id of the paragraph this application is attached to */
  public String getParagraphId() {
    return paragraphId;
  }

  /** @return angular object registry scoped to this application */
  public HeliumAppAngularObjectRegistry getAngularObjectRegistry() {
    return angularObjectRegistry;
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/helium/Application.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/helium/Application.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.helium;
import org.apache.zeppelin.annotation.Experimental;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.resource.ResourceSet;
import java.io.IOException;
/**
* Base class for pluggable application (e.g. visualization)
* Application can access resources from ResourcePool and interact with front-end using
* AngularDisplay system
*/
@Experimental
@Experimental
public abstract class Application {
  private final ApplicationContext context;

  public Application(ApplicationContext context) {
    this.context = context;
  }

  /** @return context this application instance runs in */
  public ApplicationContext context() {
    return context;
  }

  /**
   * This method can be invoked multiple times before unload(),
   * Either just after application selected or when paragraph re-run after application load
   *
   * @param args resources resolved for this application
   */
  @Experimental
  public abstract void run(ResourceSet args)
      throws ApplicationException, IOException;

  /**
   * this method is invoked just before application is removed
   */
  @Experimental
  public abstract void unload() throws ApplicationException;

  /**
   * Print string on the notebook
   * @param string text to print
   * @throws IOException on write failure
   */
  @Experimental
  public void print(String string) throws IOException {
    context.out.write(string);
  }

  /**
   * Print string on the notebook with newline
   * @param string text to print
   * @throws IOException on write failure
   */
  @Experimental
  public void println(String string) throws IOException {
    print(string + "\n");
  }

  /**
   * Print resource on the notebook
   * @param resourceName name of the resource to print
   * @throws IOException on write failure
   */
  @Experimental
  public void printResource(String resourceName) throws IOException {
    context.out.writeResource(resourceName);
  }

  /**
   * Print resource as a javascript
   *
   * Using this method does not require wrapping the javascript inside a
   * {@code <script></script>} tag. Javascript printed using this method will
   * be run in an un-named function, i.e. each method call creates a different
   * variable scope for the javascript code.
   *
   * This method injects '$z' into the variable scope for convenience.
   *
   * $z.scope : angularjs scope object for this application
   * $z.id : unique id for this application instance
   *
   * @param resourceName name of the resource containing the javascript
   * @throws IOException on write failure
   */
  @Experimental
  public void printResourceAsJavascript(String resourceName) throws IOException {
    beginJavascript();
    context.out.writeResource(resourceName);
    endJavascript();
  }

  /**
   * Print string as a javascript
   *
   * Using this method does not require wrapping the javascript inside a
   * {@code <script></script>} tag. Javascript printed using this method will
   * be run in an un-named function, i.e. each method call creates a different
   * variable scope for the javascript code.
   *
   * This method injects '$z' into the variable scope for convenience.
   *
   * $z.scope : angularjs scope object for this application
   * $z.id : unique id for this application instance
   *
   * @param js javascript source to print
   * @throws IOException on write failure
   */
  @Experimental
  public void printStringAsJavascript(String js) throws IOException {
    beginJavascript();
    context.out.write(js);
    endJavascript();
  }

  /**
   * Emits the opening of the script wrapper.
   *
   * NOTE: StringBuilder does not override hashCode(), so js.hashCode() is the
   * builder's identity hash -- constant within this invocation. That makes the
   * id on the generated {@code <script>} tag and the angular.element() selector
   * below agree, while being (effectively) unique per call.
   */
  private void beginJavascript() throws IOException {
    // StringBuilder instead of StringBuffer: no cross-thread sharing here,
    // so the synchronized buffer was unnecessary.
    StringBuilder js = new StringBuilder();
    js.append("\n<script id=\"app_js_" + js.hashCode() + "\">\n");
    js.append("(function() {\n");
    js.append("let $z = {\n");
    js.append("id : \"" + context.getApplicationInstanceId() + "\",\n");
    js.append("scope : angular.element(\"#app_js_" + js.hashCode() + "\").scope()\n");
    js.append("};\n");
    js.append("$z.result = ($z.scope._devmodeResult) ? " +
        "$z.scope._devmodeResult : $z.scope.$parent.paragraph.result;\n");
    context.out.write(js.toString());
  }

  /** Emits the closing of the script wrapper opened by beginJavascript(). */
  private void endJavascript() throws IOException {
    StringBuilder js = new StringBuilder();
    js.append("\n})();\n");
    js.append("</script>\n");
    context.out.write(js.toString());
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/user/Credentials.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/user/Credentials.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.user;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.attribute.PosixFilePermission;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import static java.nio.file.attribute.PosixFilePermission.OWNER_READ;
import static java.nio.file.attribute.PosixFilePermission.OWNER_WRITE;
/**
* Class defining credentials for data source authorization
*/
public class Credentials {
  private static final Logger LOG = LoggerFactory.getLogger(Credentials.class);

  private Map<String, UserCredentials> credentialsMap;
  private Gson gson;
  private Boolean credentialsPersist = true;
  File credentialsFile;

  /**
   * @param credentialsPersist whether credentials are loaded from / saved to disk
   * @param credentialsPath path of the credentials file; may be null when not persisting
   */
  public Credentials(Boolean credentialsPersist, String credentialsPath) {
    this.credentialsPersist = credentialsPersist;
    if (credentialsPath != null) {
      credentialsFile = new File(credentialsPath);
    }
    credentialsMap = new HashMap<>();
    if (credentialsPersist) {
      GsonBuilder builder = new GsonBuilder();
      builder.setPrettyPrinting();
      gson = builder.create();
      loadFromFile();
    }
  }

  /**
   * Returns the credentials stored for {@code username}, or an empty
   * UserCredentials (never null) when the user has none.
   */
  public UserCredentials getUserCredentials(String username) {
    UserCredentials uc = credentialsMap.get(username);
    if (uc == null) {
      uc = new UserCredentials();
    }
    return uc;
  }

  /** Stores (replacing any previous) credentials for {@code username} and persists. */
  public void putUserCredentials(String username, UserCredentials uc) throws IOException {
    credentialsMap.put(username, uc);
    saveCredentials();
  }

  /** Removes all credentials of {@code username}; returns the removed entry or null. */
  public UserCredentials removeUserCredentials(String username) throws IOException {
    UserCredentials uc;
    uc = credentialsMap.remove(username);
    saveCredentials();
    return uc;
  }

  /**
   * Removes a single entity entry from {@code username}'s credentials.
   *
   * @return true when the entry existed and was removed, false otherwise
   */
  public boolean removeCredentialEntity(String username, String entity) throws IOException {
    UserCredentials uc = credentialsMap.get(username);
    // Bug fix: the original dereferenced uc even when it was null, throwing
    // NullPointerException for users with no stored credentials.
    if (uc == null || !uc.existUsernamePassword(entity)) {
      return false;
    }
    uc.removeUsernamePassword(entity);
    saveCredentials();
    return true;
  }

  /** Persists the in-memory map when persistence is enabled. */
  public void saveCredentials() throws IOException {
    if (credentialsPersist) {
      saveToFile();
    }
  }

  /** Loads the credentials map from disk; missing or unreadable file leaves the map as-is. */
  private void loadFromFile() {
    LOG.info(credentialsFile.getAbsolutePath());
    if (!credentialsFile.exists()) {
      // nothing to read
      return;
    }
    // try-with-resources: the reader (and underlying stream) is closed even
    // when readLine() throws; the original leaked the streams on error.
    // Files.newBufferedReader reads UTF-8 rather than the platform charset.
    try (BufferedReader bufferedReader = Files.newBufferedReader(credentialsFile.toPath())) {
      StringBuilder sb = new StringBuilder();
      String line;
      while ((line = bufferedReader.readLine()) != null) {
        sb.append(line);
      }
      CredentialsInfoSaving info = gson.fromJson(sb.toString(), CredentialsInfoSaving.class);
      // Guard: gson returns null for an empty/blank file; keep the current map then.
      if (info != null && info.credentialsMap != null) {
        this.credentialsMap = info.credentialsMap;
      }
    } catch (IOException e) {
      LOG.error("Error loading credentials file", e);
    }
  }

  /** Serializes the map to JSON and writes it with owner-only permissions. */
  private void saveToFile() throws IOException {
    String jsonString;
    synchronized (credentialsMap) {
      CredentialsInfoSaving info = new CredentialsInfoSaving();
      info.credentialsMap = credentialsMap;
      jsonString = gson.toJson(info);
    }
    try {
      if (!credentialsFile.exists()) {
        credentialsFile.createNewFile();
        // credentials are secrets: restrict the file to owner read/write
        Set<PosixFilePermission> permissions = EnumSet.of(OWNER_READ, OWNER_WRITE);
        Files.setPosixFilePermissions(credentialsFile.toPath(), permissions);
      }
      // try-with-resources closes the writer on all paths (the original leaked
      // the stream when append() threw). Default open options truncate the
      // file, matching the old FileOutputStream(file, false); UTF-8 encoding
      // pairs with loadFromFile() above.
      try (BufferedWriter out = Files.newBufferedWriter(credentialsFile.toPath())) {
        out.append(jsonString);
      }
    } catch (IOException e) {
      LOG.error("Error saving credentials file", e);
    }
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/user/UsernamePassword.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/user/UsernamePassword.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.user;
/**
* Username and Password POJO
*/
public class UsernamePassword {
  private String username;
  private String password;

  /**
   * Creates a credential pair for one entity.
   *
   * @param username account name
   * @param password account secret
   */
  public UsernamePassword(String username, String password) {
    this.username = username;
    this.password = password;
  }

  /** @return the account name */
  public String getUsername() {
    return username;
  }

  /** Replaces the account name. */
  public void setUsername(String username) {
    this.username = username;
  }

  /** @return the account secret */
  public String getPassword() {
    return password;
  }

  /** Replaces the account secret. */
  public void setPassword(String password) {
    this.password = password;
  }

  @Override
  public String toString() {
    // NOTE: includes the password in clear text, as the original did.
    StringBuilder sb = new StringBuilder("UsernamePassword{");
    sb.append("username='").append(username).append('\'');
    sb.append(", password='").append(password).append('\'');
    sb.append('}');
    return sb.toString();
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/user/AuthenticationInfo.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/user/AuthenticationInfo.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.user;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/***
*
*/
public class AuthenticationInfo {
  private static final Logger LOG = LoggerFactory.getLogger(AuthenticationInfo.class);

  String user;
  String ticket;
  UserCredentials userCredentials;

  /** Shared instance representing the anonymous (unauthenticated) subject. */
  public static final AuthenticationInfo ANONYMOUS = new AuthenticationInfo("anonymous",
      "anonymous");

  public AuthenticationInfo() {}

  /** @param user subject's user name */
  public AuthenticationInfo(String user) {
    this.user = user;
  }

  /**
   * @param user subject's user name
   * @param ticket authentication ticket associated with the subject
   */
  public AuthenticationInfo(String user, String ticket) {
    this.user = user;
    this.ticket = ticket;
  }

  public String getUser() {
    return user;
  }

  public void setUser(String user) {
    this.user = user;
  }

  public String getTicket() {
    return ticket;
  }

  public void setTicket(String ticket) {
    this.ticket = ticket;
  }

  public UserCredentials getUserCredentials() {
    return userCredentials;
  }

  public void setUserCredentials(UserCredentials userCredentials) {
    this.userCredentials = userCredentials;
  }

  /**
   * Null-safe anonymity check: a null subject is treated as anonymous
   * (with a warning), otherwise defers to the instance check.
   */
  public static boolean isAnonymous(AuthenticationInfo subject) {
    if (subject != null) {
      return subject.isAnonymous();
    }
    LOG.warn("Subject is null, assuming anonymous. "
        + "Not recommended to use subject as null except in tests");
    return true;
  }

  /**
   * A subject is anonymous when it is the shared ANONYMOUS instance, its
   * user name is "anonymous" (case-insensitive), or its user name is empty.
   */
  public boolean isAnonymous() {
    if (ANONYMOUS.equals(this)) {
      return true;
    }
    String name = this.getUser();
    return "anonymous".equalsIgnoreCase(name) || StringUtils.isEmpty(name);
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/user/UserCredentials.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/user/UserCredentials.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.user;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* User Credentials POJO
*/
public class UserCredentials {
  // entity name -> username/password pair; concurrent map so entries can be
  // read and mutated from multiple threads without external locking
  private Map<String, UsernamePassword> userCredentials = new ConcurrentHashMap<>();

  /** @return the credentials stored for {@code entity}, or null when absent */
  public UsernamePassword getUsernamePassword(String entity) {
    return userCredentials.get(entity);
  }

  /** Stores (replacing any previous) credentials for {@code entity}. */
  public void putUsernamePassword(String entity, UsernamePassword up) {
    userCredentials.put(entity, up);
  }

  /** Removes the credentials stored for {@code entity}, if any. */
  public void removeUsernamePassword(String entity) {
    userCredentials.remove(entity);
  }

  /** @return true when credentials exist for {@code entity} */
  public boolean existUsernamePassword(String entity) {
    return userCredentials.containsKey(entity);
  }

  @Override
  public String toString() {
    return "UserCredentials{userCredentials=" + userCredentials + "}";
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/user/CredentialsInfoSaving.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/user/CredentialsInfoSaving.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.user;
import java.util.Map;
/**
* Helper class to save credentials
*/
public class CredentialsInfoSaving {
  // username -> that user's credentials; public so Gson can serialize and
  // deserialize the field directly (see Credentials.loadFromFile/saveToFile)
  public Map<String, UserCredentials> credentialsMap;
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/AngularObject.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/AngularObject.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.display;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.ExecutorService;
import org.apache.zeppelin.scheduler.ExecutorFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* AngularObject provides binding between back-end (interpreter) and front-end
* User provided object will automatically synchronized with front-end side.
* i.e. update from back-end will be sent to front-end, update from front-end will sent-to backend
*
* @param <T>
*/
public class AngularObject<T> {
  // Hoisted to a constant: the original looked up a new Logger on every
  // set() call. static/transient-free fields below are what Gson serializes.
  private static final Logger logger = LoggerFactory.getLogger(AngularObject.class);

  private String name;
  private T object;

  // listener and watchers are transient: they are runtime wiring, not state
  // to be serialized/synchronized with the front-end.
  private transient AngularObjectListener listener;
  private transient List<AngularObjectWatcher> watchers = new LinkedList<>();

  private String noteId; // noteId belonging to. null for global scope
  private String paragraphId; // paragraphId belongs to. null for notebook scope

  /**
   * Public constructor, neccessary for the deserialization when using Thrift angularRegistryPush()
   * Without public constructor, GSON library will instantiate the AngularObject using
   * serialization so the <strong>watchers</strong> list won't be initialized and will throw
   * NullPointerException the first time it is accessed
   */
  public AngularObject() {
  }

  /**
   * To create new AngularObject, use AngularObjectRegistry.add()
   *
   * @param name name of object
   * @param o reference to user provided object to sent to front-end
   * @param noteId noteId belongs to. can be null
   * @param paragraphId paragraphId belongs to. can be null
   * @param listener event listener
   */
  protected AngularObject(String name, T o, String noteId, String paragraphId,
                          AngularObjectListener listener) {
    this.name = name;
    this.noteId = noteId;
    this.paragraphId = paragraphId;
    this.listener = listener;
    object = o;
  }

  /**
   * Get name of this object
   * @return name
   */
  public String getName() {
    return name;
  }

  /**
   * Set noteId
   * @param noteId noteId belongs to. can be null
   */
  public void setNoteId(String noteId) {
    this.noteId = noteId;
  }

  /**
   * Get noteId
   * @return noteId
   */
  public String getNoteId() {
    return noteId;
  }

  /**
   * get ParagraphId
   * @return paragraphId
   */
  public String getParagraphId() {
    return paragraphId;
  }

  /**
   * Set paragraphId
   * @param paragraphId paragraphId. can be null
   */
  public void setParagraphId(String paragraphId) {
    this.paragraphId = paragraphId;
  }

  /**
   * Check if it is global scope object
   * @return true it is global scope
   */
  public boolean isGlobal() {
    return noteId == null;
  }

  /**
   * Identity is (name, noteId, paragraphId); the wrapped value is deliberately
   * excluded so an object keeps its registry identity across set() calls.
   */
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    AngularObject<?> that = (AngularObject<?>) o;
    return Objects.equals(name, that.name) &&
        Objects.equals(noteId, that.noteId) &&
        Objects.equals(paragraphId, that.paragraphId);
  }

  @Override
  public int hashCode() {
    return Objects.hash(name, noteId, paragraphId);
  }

  /**
   * Get value
   * @return current wrapped value (declared Object for API compatibility)
   */
  public Object get() {
    return object;
  }

  /**
   * fire updated() event for listener
   * Note that it does not invoke watcher.watch()
   */
  public void emit() {
    if (listener != null) {
      listener.updated(this);
    }
  }

  /**
   * Set value
   * @param o reference to new user provided object
   */
  public void set(T o) {
    set(o, true);
  }

  /**
   * Set value
   * @param o reference to new user provided object
   * @param emit false on skip firing event for listener. note that it does not skip invoke
   *             watcher.watch() in any case
   */
  public void set(T o, boolean emit) {
    final T before = object;
    final T after = o;
    object = o;
    if (emit) {
      emit();
    }
    // Snapshot the watcher list under the lock, then notify outside it so a
    // slow watcher cannot block add/removeWatcher.
    List<AngularObjectWatcher> ws = new LinkedList<>();
    synchronized (watchers) {
      ws.addAll(watchers);
    }
    // Watchers are invoked asynchronously on a shared bounded executor.
    ExecutorService executor = ExecutorFactory.singleton().createOrGet("angularObjectWatcher", 50);
    for (final AngularObjectWatcher w : ws) {
      executor.submit(new Runnable() {
        @Override
        public void run() {
          try {
            w.watch(before, after);
          } catch (Exception e) {
            logger.error("Exception on watch", e);
          }
        }
      });
    }
  }

  /**
   * Set event listener for this object
   * @param listener listener to notify on emit()
   */
  public void setListener(AngularObjectListener listener) {
    this.listener = listener;
  }

  /**
   * Get event listener of this object
   * @return event listener
   */
  public AngularObjectListener getListener() {
    return listener;
  }

  /**
   * Add a watcher for this object.
   * Multiple watcher can be registered.
   *
   * @param watcher watcher to add
   */
  public void addWatcher(AngularObjectWatcher watcher) {
    synchronized (watchers) {
      watchers.add(watcher);
    }
  }

  /**
   * Remove a watcher from this object
   * @param watcher watcher to remove
   */
  public void removeWatcher(AngularObjectWatcher watcher) {
    synchronized (watchers) {
      watchers.remove(watcher);
    }
  }

  /**
   * Remove all watchers from this object
   */
  public void clearAllWatchers() {
    synchronized (watchers) {
      watchers.clear();
    }
  }

  @Override
  public String toString() {
    final StringBuilder sb = new StringBuilder("AngularObject{");
    sb.append("noteId='").append(noteId).append('\'');
    sb.append(", paragraphId='").append(paragraphId).append('\'');
    sb.append(", object=").append(object);
    sb.append(", name='").append(name).append('\'');
    sb.append('}');
    return sb.toString();
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/GUI.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/GUI.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.display;
import java.io.Serializable;
import java.util.*;
import org.apache.zeppelin.display.Input.ParamOption;
/**
* Settings of a form.
*/
public class GUI implements Serializable {
  Map<String, Object> params = new HashMap<>(); // form parameters from client
  LinkedHashMap<String, Input> forms = new LinkedHashMap<>(); // form configuration

  public GUI() {
  }

  public void setParams(Map<String, Object> values) {
    this.params = values;
  }

  public Map<String, Object> getParams() {
    return params;
  }

  public LinkedHashMap<String, Input> getForms() {
    return forms;
  }

  public void setForms(LinkedHashMap<String, Input> forms) {
    this.forms = forms;
  }

  /**
   * Registers a text input form and returns its current value.
   *
   * @param id form id
   * @param defaultValue value used when the client supplied none
   * @return the client-supplied value, falling back to {@code defaultValue}
   */
  public Object input(String id, Object defaultValue) {
    // first find values from client and then use default
    Object value = params.get(id);
    if (value == null) {
      value = defaultValue;
    }
    forms.put(id, new Input(id, defaultValue, "input"));
    return value;
  }

  public Object input(String id) {
    return input(id, "");
  }

  /**
   * Registers a select form and returns its current value.
   *
   * @param id form id
   * @param defaultValue value used when the client supplied none
   * @param options selectable options
   * @return the client-supplied value, falling back to {@code defaultValue}
   */
  public Object select(String id, Object defaultValue, ParamOption[] options) {
    Object value = params.get(id);
    if (value == null) {
      value = defaultValue;
    }
    forms.put(id, new Input(id, defaultValue, "select", options));
    return value;
  }

  /**
   * Registers a checkbox form and returns the checked values, restricted
   * to those present in {@code options}.
   *
   * @param id form id
   * @param defaultChecked values checked when the client supplied none; may be null
   * @param options valid options
   * @return checked values that are valid options; empty when nothing is checked
   */
  public Collection<Object> checkbox(String id, Collection<Object> defaultChecked,
                                     ParamOption[] options) {
    // Client parameters arrive untyped; narrowed on the smallest possible scope.
    @SuppressWarnings("unchecked")
    Collection<Object> checked = (Collection<Object>) params.get(id);
    if (checked == null) {
      checked = defaultChecked;
    }
    forms.put(id, new Input(id, defaultChecked, "checkbox", options));
    Collection<Object> filtered = new LinkedList<>();
    // Guard: the original threw NullPointerException when both the client
    // value and defaultChecked were null; treat that as "nothing checked".
    if (checked != null) {
      for (Object o : checked) {
        if (isValidOption(o, options)) {
          filtered.add(o);
        }
      }
    }
    return filtered;
  }

  /** @return true when {@code o} matches the value of one of {@code options} */
  private boolean isValidOption(Object o, ParamOption[] options) {
    for (ParamOption option : options) {
      // Objects.equals: null-safe, unlike the original o.equals(...) which
      // threw NullPointerException for a null checked element.
      if (Objects.equals(o, option.getValue())) {
        return true;
      }
    }
    return false;
  }

  /** Drops all registered form configurations (client params are kept). */
  public void clear() {
    this.forms = new LinkedHashMap<>();
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/AngularObjectRegistry.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/AngularObjectRegistry.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.display;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
* AngularObjectRegistry keeps all the object that binded to Angular Display System.
* AngularObjectRegistry is created per interpreter group.
* It provides three different scope of AngularObjects :
* - Paragraphscope : AngularObject is valid in specific paragraph
* - Notebook scope: AngularObject is valid in a single notebook
* - Global scope : Shared to all notebook that uses the same interpreter group
*/
public class AngularObjectRegistry {
Map<String, Map<String, AngularObject>> registry = new HashMap<>();
private final String GLOBAL_KEY = "_GLOBAL_";
private AngularObjectRegistryListener listener;
private String interpreterId;
AngularObjectListener angularObjectListener;
  /**
   * Creates a registry for one interpreter group.
   *
   * @param interpreterId id of the interpreter group this registry belongs to
   * @param listener callback notified when angular objects change; may be null
   */
  public AngularObjectRegistry(final String interpreterId,
      final AngularObjectRegistryListener listener) {
    this.interpreterId = interpreterId;
    this.listener = listener;
    // Bridge per-object update events to the registry-level listener,
    // tagging each event with this registry's interpreterId.
    angularObjectListener = new AngularObjectListener() {
      @Override
      public void updated(AngularObject updatedObject) {
        if (listener != null) {
          listener.onUpdate(interpreterId, updatedObject);
        }
      }
    };
  }
  /** @return the registry-level listener given at construction; may be null */
  public AngularObjectRegistryListener getListener() {
    return listener;
  }
/**
* Add object into registry
*
* Paragraph scope when noteId and paragraphId both not null
* Notebook scope when paragraphId is null
* Global scope when noteId and paragraphId both null
*
* @param name Name of object
* @param o Reference to the object
* @param noteId noteId belonging to. null for global scope
* @param paragraphId paragraphId belongs to. null for notebook scope
* @return AngularObject that added
*/
public AngularObject add(String name, Object o, String noteId, String paragraphId) {
return add(name, o, noteId, paragraphId, true);
}
private String getRegistryKey(String noteId, String paragraphId) {
if (noteId == null) {
return GLOBAL_KEY;
} else {
if (paragraphId == null) {
return noteId;
} else {
return noteId + "_" + paragraphId;
}
}
}
private Map<String, AngularObject> getRegistryForKey(String noteId, String paragraphId) {
synchronized (registry) {
String key = getRegistryKey(noteId, paragraphId);
if (!registry.containsKey(key)) {
registry.put(key, new HashMap<String, AngularObject>());
}
return registry.get(key);
}
}
/**
* Add object into registry
*
* Paragraph scope when noteId and paragraphId both not null
* Notebook scope when paragraphId is null
* Global scope when noteId and paragraphId both null
*
* @param name Name of object
* @param o Reference to the object
* @param noteId noteId belonging to. null for global scope
* @param paragraphId paragraphId belongs to. null for notebook scope
* @param emit skip firing onAdd event on false
* @return AngularObject that added
*/
public AngularObject add(String name, Object o, String noteId, String paragraphId,
boolean emit) {
AngularObject ao = createNewAngularObject(name, o, noteId, paragraphId);
synchronized (registry) {
Map<String, AngularObject> noteLocalRegistry = getRegistryForKey(noteId, paragraphId);
noteLocalRegistry.put(name, ao);
if (listener != null && emit) {
listener.onAdd(interpreterId, ao);
}
}
return ao;
}
protected AngularObject createNewAngularObject(String name, Object o, String noteId,
String paragraphId) {
return new AngularObject(name, o, noteId, paragraphId, angularObjectListener);
}
protected AngularObjectListener getAngularObjectListener() {
return angularObjectListener;
}
/**
* Remove a object from registry
*
* @param name Name of object to remove
* @param noteId noteId belongs to. null for global scope
* @param paragraphId paragraphId belongs to. null for notebook scope
* @return removed object. null if object is not found in registry
*/
public AngularObject remove(String name, String noteId, String paragraphId) {
return remove(name, noteId, paragraphId, true);
}
/**
* Remove a object from registry
*
* @param name Name of object to remove
* @param noteId noteId belongs to. null for global scope
* @param paragraphId paragraphId belongs to. null for notebook scope
* @param emit skip fireing onRemove event on false
* @return removed object. null if object is not found in registry
*/
public AngularObject remove(String name, String noteId, String paragraphId, boolean emit) {
synchronized (registry) {
Map<String, AngularObject> r = getRegistryForKey(noteId, paragraphId);
AngularObject o = r.remove(name);
if (listener != null && emit) {
listener.onRemove(interpreterId, name, noteId, paragraphId);
}
return o;
}
}
/**
* Remove all angular object in the scope.
*
* Remove all paragraph scope angular object when noteId and paragraphId both not null
* Remove all notebook scope angular object when paragraphId is null
* Remove all global scope angular objects when noteId and paragraphId both null
*
* @param noteId noteId
* @param paragraphId paragraphId
*/
public void removeAll(String noteId, String paragraphId) {
synchronized (registry) {
List<AngularObject> all = getAll(noteId, paragraphId);
for (AngularObject ao : all) {
remove(ao.getName(), noteId, paragraphId);
}
}
}
/**
* Get a object from registry
* @param name name of object
* @param noteId noteId that belongs to
* @param paragraphId paragraphId that belongs to
* @return angularobject. null when not found
*/
public AngularObject get(String name, String noteId, String paragraphId) {
synchronized (registry) {
Map<String, AngularObject> r = getRegistryForKey(noteId, paragraphId);
return r.get(name);
}
}
/**
* Get all object in the scope
* @param noteId noteId that belongs to
* @param paragraphId paragraphId that belongs to
* @return all angularobject in the scope
*/
public List<AngularObject> getAll(String noteId, String paragraphId) {
List<AngularObject> all = new LinkedList<>();
synchronized (registry) {
Map<String, AngularObject> r = getRegistryForKey(noteId, paragraphId);
if (r != null) {
all.addAll(r.values());
}
}
return all;
}
/**
* Get all angular object related to specific note.
* That includes all global scope objects, notebook scope objects and paragraph scope objects
* belongs to the noteId.
*
* @param noteId
* @return
*/
public List<AngularObject> getAllWithGlobal(String noteId) {
List<AngularObject> all = new LinkedList<>();
synchronized (registry) {
Map<String, AngularObject> global = getRegistryForKey(null, null);
if (global != null) {
all.addAll(global.values());
}
for (String key : registry.keySet()) {
if (key.startsWith(noteId)) {
all.addAll(registry.get(key).values());
}
}
}
return all;
}
public String getInterpreterGroupId() {
return interpreterId;
}
public Map<String, Map<String, AngularObject>> getRegistry() {
return registry;
}
public void setRegistry(Map<String, Map<String, AngularObject>> registry) {
this.registry = registry;
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/Input.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/Input.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.display;
import org.apache.commons.lang.StringUtils;
import java.io.Serializable;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Input type.
*/
/**
 * Input type. Represents a single dynamic form parsed out of a paragraph script,
 * plus static helpers to extract all forms from a script
 * ({@link #extractSimpleQueryForm(String)}) and to substitute user-supplied
 * values back into the script ({@link #getSimpleQuery(Map, String)}).
 */
public class Input implements Serializable {
  /**
   * Parameters option: one selectable value of a select/checkbox form,
   * with an optional human-readable display name.
   */
  public static class ParamOption {
    Object value;       // actual value substituted into the script
    String displayName; // label shown in the UI; may be null

    public ParamOption(Object value, String displayName) {
      super();
      this.value = value;
      this.displayName = displayName;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (o == null || getClass() != o.getClass()) return false;
      ParamOption that = (ParamOption) o;
      if (value != null ? !value.equals(that.value) : that.value != null) return false;
      return displayName != null ? displayName.equals(that.displayName) : that.displayName == null;
    }

    @Override
    public int hashCode() {
      int result = value != null ? value.hashCode() : 0;
      result = 31 * result + (displayName != null ? displayName.hashCode() : 0);
      return result;
    }

    public Object getValue() {
      return value;
    }

    public void setValue(Object value) {
      this.value = value;
    }

    public String getDisplayName() {
      return displayName;
    }

    public void setDisplayName(String displayName) {
      this.displayName = displayName;
    }
  }

  String name;           // variable name; also the key in the extracted forms map
  String displayName;    // label shown in the UI; defaults to name
  String type;           // form type (e.g. "checkbox"); null for a plain text input
  String argument;       // optional type argument, e.g. the checkbox join delimiter
  Object defaultValue;   // a String, or a String[] for checkbox multi-defaults
  ParamOption[] options; // selectable options; null for a plain text input
  boolean hidden;        // true when the form was declared with the leading "_" marker

  /** Creates a form with the given name (also used as display name), default and type. */
  public Input(String name, Object defaultValue, String type) {
    this.name = name;
    this.displayName = name;
    this.defaultValue = defaultValue;
    this.type = type;
  }

  /** Creates a select-style form with the given options. */
  public Input(String name, Object defaultValue, String type, ParamOption[] options) {
    this.name = name;
    this.displayName = name;
    this.defaultValue = defaultValue;
    this.type = type;
    this.options = options;
  }

  /** Full constructor used by the parser ({@link #getInputForm(Matcher)}). */
  public Input(String name, String displayName, String type, String argument, Object defaultValue,
               ParamOption[] options, boolean hidden) {
    super();
    this.name = name;
    this.displayName = displayName;
    this.argument = argument;
    this.type = type;
    this.defaultValue = defaultValue;
    this.options = options;
    this.hidden = hidden;
  }

  // NOTE(review): compares by name only, assumes o is a non-null Input, and
  // hashCode() is not overridden to match -- confirm before using Input as a
  // hash key or comparing against arbitrary objects.
  @Override
  public boolean equals(Object o) {
    return name.equals(((Input) o).getName());
  }

  public String getName() {
    return name;
  }

  public void setName(String name) {
    this.name = name;
  }

  public String getDisplayName() {
    return displayName;
  }

  public void setDisplayName(String displayName) {
    this.displayName = displayName;
  }

  public String getType() {
    return type;
  }

  public void setType(String type) {
    this.type = type;
  }

  public Object getDefaultValue() {
    return defaultValue;
  }

  public void setDefaultValue(Object defaultValue) {
    this.defaultValue = defaultValue;
  }

  public ParamOption[] getOptions() {
    return options;
  }

  public void setOptions(ParamOption[] options) {
    this.options = options;
  }

  public boolean isHidden() {
    return hidden;
  }

  // Syntax of variables: ${TYPE:NAME=DEFAULT_VALUE1|DEFAULT_VALUE2|...,VALUE1|VALUE2|...}
  // Type is optional. Type may contain an optional argument with syntax: TYPE(ARG)
  // NAME and VALUEs may contain an optional display name with syntax: NAME(DISPLAY_NAME)
  // DEFAULT_VALUEs may not contain display name
  // Examples:  ${age}                              input form without default value
  //            ${age=3}                            input form with default value
  //            ${age(Age)=3}                       input form with display name and default value
  //            ${country=US(United States)|UK|JP}  select form
  //            ${checkbox( or ):country(Country)=US|JP,US(United States)|UK|JP}
  //                      checkbox form with " or " as delimiter: will be
  //                      expanded to "US or JP"
  // A leading "_" before "${" marks the form as hidden (captured by group 1).
  private static final Pattern VAR_PTN = Pattern.compile("([_])?[$][{]([^=}]*([=][^}]*)?)[}]");

  /**
   * Splits "NAME(DISPLAY_NAME)" into {NAME, DISPLAY_NAME}.
   * Returns null when the string carries no "(...)" display-name part.
   */
  private static String[] getNameAndDisplayName(String str) {
    Pattern p = Pattern.compile("([^(]*)\\s*[(]([^)]*)[)]");
    Matcher m = p.matcher(str.trim());
    if (m == null || m.find() == false) {
      return null;
    }
    String[] ret = new String[2];
    ret[0] = m.group(1);
    ret[1] = m.group(2);
    return ret;
  }

  /**
   * Splits "TYPE(ARG):REST" into {TYPE, ARG, REST}. ARG (index 1) is null when
   * no "(...)" argument is present. Returns null when there is no ":" type prefix.
   */
  private static String[] getType(String str) {
    Pattern p = Pattern.compile("([^:()]*)\\s*([(][^()]*[)])?\\s*:(.*)");
    Matcher m = p.matcher(str.trim());
    if (m == null || m.find() == false) {
      return null;
    }
    String[] ret = new String[3];
    ret[0] = m.group(1).trim();
    if (m.group(2) != null) {
      ret[1] = m.group(2).trim().replaceAll("[()]", "");
    }
    ret[2] = m.group(3).trim();
    return ret;
  }

  /**
   * Builds an Input from one VAR_PTN match: group 1 is the optional hidden
   * marker, group 2 the "TYPE:NAME=VALUES" body.
   */
  private static Input getInputForm(Matcher match) {
    String hiddenPart = match.group(1);
    boolean hidden = false;
    if ("_".equals(hiddenPart)) {
      hidden = true;
    }
    String m = match.group(2);
    String namePart;
    String valuePart;
    // Split body into the part before "=" (type+name) and after (defaults/options).
    int p = m.indexOf('=');
    if (p > 0) {
      namePart = m.substring(0, p);
      valuePart = m.substring(p + 1);
    } else {
      namePart = m;
      valuePart = null;
    }
    String varName;
    String displayName = null;
    String type = null;
    String arg = null;
    Object defaultValue = "";
    ParamOption[] paramOptions = null;
    // get var name type
    String varNamePart;
    String[] typeArray = getType(namePart);
    if (typeArray != null) {
      type = typeArray[0];
      arg = typeArray[1];
      varNamePart = typeArray[2];
    } else {
      varNamePart = namePart;
    }
    // get var name and displayname
    String[] varNameArray = getNameAndDisplayName(varNamePart);
    if (varNameArray != null) {
      varName = varNameArray[0];
      displayName = varNameArray[1];
    } else {
      varName = varNamePart.trim();
    }
    // get defaultValue
    if (valuePart != null) {
      // find default value: text before the first "," is the default,
      // text after it is the pipe-separated option list
      int optionP = valuePart.indexOf(",");
      if (optionP >= 0) { // option available
        defaultValue = valuePart.substring(0, optionP);
        if (type != null && type.equals("checkbox")) {
          // checkbox may contain multiple default checks
          defaultValue = Input.splitPipe((String) defaultValue);
        }
        String optionPart = valuePart.substring(optionP + 1);
        String[] options = Input.splitPipe(optionPart);
        paramOptions = new ParamOption[options.length];
        for (int i = 0; i < options.length; i++) {
          String[] optNameArray = getNameAndDisplayName(options[i]);
          if (optNameArray != null) {
            paramOptions[i] = new ParamOption(optNameArray[0], optNameArray[1]);
          } else {
            paramOptions[i] = new ParamOption(options[i], null);
          }
        }
      } else { // no option
        defaultValue = valuePart;
      }
    }
    return new Input(varName, displayName, type, arg, defaultValue, paramOptions, hidden);
  }

  /**
   * Extracts every ${...} form from the script, keyed by form name.
   * The reserved "pql" form is always dropped from the result.
   *
   * @param script paragraph text; may be null (yields an empty map)
   * @return forms in order of first appearance
   */
  public static LinkedHashMap<String, Input> extractSimpleQueryForm(String script) {
    LinkedHashMap<String, Input> forms = new LinkedHashMap<>();
    if (script == null) {
      return forms;
    }
    String replaced = script;
    Matcher match = VAR_PTN.matcher(replaced);
    while (match.find()) {
      Input form = getInputForm(match);
      forms.put(form.name, form);
    }
    forms.remove("pql");
    return forms;
  }

  private static final String DEFAULT_DELIMITER = ",";

  /**
   * Replaces every ${...} form in the script with the user-supplied value from
   * {@code params} (falling back to the form's default). For multi-selection
   * values, obsolete selections (no longer present in the form's options) are
   * filtered out of {@code params} as a side effect, and the remaining values
   * are joined with the form's delimiter argument.
   *
   * @param params form name to value; mutated for multi-selection forms
   * @param script paragraph text containing ${...} forms
   * @return the script with all forms substituted
   */
  public static String getSimpleQuery(Map<String, Object> params, String script) {
    String replaced = script;
    Matcher match = VAR_PTN.matcher(replaced);
    while (match.find()) {
      Input input = getInputForm(match);
      Object value;
      if (params.containsKey(input.name)) {
        value = params.get(input.name);
      } else {
        value = input.defaultValue;
      }
      String expanded;
      if (value instanceof Object[] || value instanceof Collection) { // multi-selection
        String delimiter = input.argument;
        if (delimiter == null) {
          delimiter = DEFAULT_DELIMITER;
        }
        Collection<Object> checked = value instanceof Collection ? (Collection<Object>) value
            : Arrays.asList((Object[]) value);
        List<Object> validChecked = new LinkedList<>();
        for (Object o : checked) { // filter out obsolete checked values
          for (ParamOption option : input.getOptions()) {
            if (option.getValue().equals(o)) {
              validChecked.add(o);
              break;
            }
          }
        }
        params.put(input.name, validChecked);
        expanded = StringUtils.join(validChecked, delimiter);
      } else { // single-selection
        expanded = value.toString();
      }
      // Re-run the matcher from scratch after each substitution so offsets stay valid.
      replaced = match.replaceFirst(expanded);
      match = VAR_PTN.matcher(replaced);
    }
    return replaced;
  }

  /** Splits on ";" that are outside of single/double-quoted sections. */
  public static String[] split(String str) {
    return str.split(";(?=([^\"']*\"[^\"']*\")*[^\"']*$)");
  }

  /** Splits on "|" honoring quotes, ${...} blocks and escapes (see {@link #split(String, char)}). */
  public static String[] splitPipe(String str) {
    return split(str, '|');
  }

  public static String[] split(String str, char split) {
    return split(str, new String[] {String.valueOf(split)}, false);
  }

  /**
   * Splits with the default escape/block configuration: backslash escapes,
   * quote and ${...} blocks, and nestable (N_-prefixed) parenthesis and
   * angle-bracket blocks.
   */
  public static String[] split(String str, String[] splitters, boolean includeSplitter) {
    String escapeSeq = "\"',;${}";
    char escapeChar = '\\';
    String[] blockStart = new String[] {"\"", "'", "${", "N_(", "N_<"};
    String[] blockEnd = new String[] {"\"", "'", "}", "N_)", "N_>"};
    return split(str, escapeSeq, escapeChar, blockStart, blockEnd, splitters, includeSplitter);
  }

  /**
   * Generic block-aware splitter. Walks the string once, tracking:
   * - escape state: escapeChar makes the next char literal when it is in escapeSeq;
   * - a stack of currently-open blocks (blockStack holds indexes into
   *   blockStart/blockEnd; index 0 is the innermost block);
   * - blocks whose start marker carries the "N_" prefix may nest.
   * Splitter strings only take effect outside of any open block, except that a
   * block-end string which is itself a splitter also splits.
   *
   * @param includeSplitter when true, the matched splitter strings are kept as
   *                        their own entries in the result
   */
  public static String[] split(String str, String escapeSeq, char escapeChar, String[] blockStart,
                               String[] blockEnd, String[] splitters, boolean includeSplitter) {
    List<String> splits = new ArrayList<>();
    StringBuilder curString = new StringBuilder();
    boolean escape = false; // true when escape char is found
    int lastEscapeOffset = -1;
    int blockStartPos = -1;
    List<Integer> blockStack = new LinkedList<>();
    for (int i = 0; i < str.length(); i++) {
      char c = str.charAt(i);
      // escape char detected
      if (c == escapeChar && escape == false) {
        escape = true;
        continue;
      }
      // escaped char comes
      if (escape == true) {
        if (escapeSeq.indexOf(c) < 0) {
          // not an escapable char: keep the escape char itself
          curString.append(escapeChar);
        }
        curString.append(c);
        escape = false;
        lastEscapeOffset = curString.length();
        continue;
      }
      if (blockStack.size() > 0) { // inside of block
        curString.append(c);
        // check multichar block: re-resolve which block actually opened once
        // enough characters have been consumed to disambiguate
        boolean multicharBlockDetected = false;
        for (int b = 0; b < blockStart.length; b++) {
          if (blockStartPos >= 0
              && getBlockStr(blockStart[b]).compareTo(str.substring(blockStartPos, i)) == 0) {
            blockStack.remove(0);
            blockStack.add(0, b);
            multicharBlockDetected = true;
            break;
          }
        }
        if (multicharBlockDetected == true) {
          continue;
        }
        // check if current block is nestable
        if (isNestedBlock(blockStart[blockStack.get(0)]) == true) {
          // try to find nested block start
          if (curString.substring(lastEscapeOffset + 1).endsWith(
              getBlockStr(blockStart[blockStack.get(0)])) == true) {
            blockStack.add(0, blockStack.get(0)); // block is started
            blockStartPos = i;
            continue;
          }
        }
        // check if block is finishing
        if (curString.substring(lastEscapeOffset + 1).endsWith(
            getBlockStr(blockEnd[blockStack.get(0)]))) {
          // the block closer is one of the splitters (and not nested block)
          if (isNestedBlock(blockEnd[blockStack.get(0)]) == false) {
            for (String splitter : splitters) {
              if (splitter.compareTo(getBlockStr(blockEnd[blockStack.get(0)])) == 0) {
                splits.add(curString.toString());
                if (includeSplitter == true) {
                  splits.add(splitter);
                }
                curString.setLength(0);
                lastEscapeOffset = -1;
                break;
              }
            }
          }
          blockStartPos = -1;
          blockStack.remove(0);
          continue;
        }
      } else { // not in the block
        boolean splitted = false;
        for (String splitter : splitters) {
          // forward check for splitter
          int curentLenght = i + splitter.length();
          if (splitter.compareTo(str.substring(i, Math.min(curentLenght, str.length()))) == 0) {
            splits.add(curString.toString());
            if (includeSplitter == true) {
              splits.add(splitter);
            }
            curString.setLength(0);
            lastEscapeOffset = -1;
            i += splitter.length() - 1; // skip over the rest of the splitter
            splitted = true;
            break;
          }
        }
        if (splitted == true) {
          continue;
        }
        // add char to current string
        curString.append(c);
        // check if block is started
        for (int b = 0; b < blockStart.length; b++) {
          if (curString.substring(lastEscapeOffset + 1)
              .endsWith(getBlockStr(blockStart[b])) == true) {
            blockStack.add(0, b); // block is started
            blockStartPos = i;
            break;
          }
        }
      }
    }
    // NOTE(review): only this final fragment is trim()-ed; earlier fragments
    // are added verbatim -- presumably intentional, but worth confirming.
    if (curString.length() > 0) {
      splits.add(curString.toString().trim());
    }
    return splits.toArray(new String[] {});
  }

  /** Strips the "N_" nestable-marker prefix off a block definition, if present. */
  private static String getBlockStr(String blockDef) {
    if (blockDef.startsWith("N_")) {
      return blockDef.substring("N_".length());
    } else {
      return blockDef;
    }
  }

  /** Returns true when the block definition carries the "N_" nestable marker. */
  private static boolean isNestedBlock(String blockDef) {
    if (blockDef.startsWith("N_")) {
      return true;
    } else {
      return false;
    }
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/AngularObjectWatcher.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/AngularObjectWatcher.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.display;
import org.apache.zeppelin.interpreter.InterpreterContext;
/**
*
*/
/**
 * Watches an AngularObject for value changes. Subclasses implement
 * {@link #watch(Object, Object, InterpreterContext)}; the package-private
 * two-argument overload forwards the context captured at construction time.
 */
public abstract class AngularObjectWatcher {
  /** Context handed back to the abstract callback on every change. */
  private final InterpreterContext interpreterContext;

  public AngularObjectWatcher(InterpreterContext context) {
    this.interpreterContext = context;
  }

  void watch(Object oldObject, Object newObject) {
    watch(oldObject, newObject, interpreterContext);
  }

  public abstract void watch(Object oldObject, Object newObject, InterpreterContext context);
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/AngularObjectRegistryListener.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/AngularObjectRegistryListener.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.display;
/**
*
*
*/
/**
 * Callback interface for AngularObjectRegistry events. Implementations are
 * notified when an object is added to, updated in, or removed from a registry.
 * (Interface methods are implicitly public; the redundant modifiers were removed.)
 */
public interface AngularObjectRegistryListener {
  /** Invoked after {@code object} is added to the registry of the given interpreter group. */
  void onAdd(String interpreterGroupId, AngularObject object);

  /** Invoked after {@code object}'s value is updated. */
  void onUpdate(String interpreterGroupId, AngularObject object);

  /** Invoked after the named object is removed from the given note/paragraph scope. */
  void onRemove(String interpreterGroupId, String name, String noteId, String paragraphId);
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/AngularObjectListener.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/AngularObjectListener.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.display;
/**
*
*/
/**
 * Callback interface notified when a single AngularObject's value is updated.
 * (Interface methods are implicitly public; the redundant modifier was removed.)
 */
public interface AngularObjectListener {
  /** Invoked after {@code updatedObject}'s value changes. */
  void updated(AngularObject updatedObject);
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/Scheduler.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/Scheduler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.scheduler;
import java.util.Collection;
/**
* Interface for scheduler
*/
/**
 * Interface for a job scheduler. A scheduler is itself a Runnable: its
 * {@code run()} drives the scheduling loop, while {@link #submit(Job)} enqueues
 * work and {@link #stop()} shuts the loop down.
 * (Interface methods are implicitly public; the redundant modifiers were removed.)
 */
public interface Scheduler extends Runnable {
  /** Name identifying this scheduler instance. */
  String getName();

  /** Jobs queued but not yet started. */
  Collection<Job> getJobsWaiting();

  /** Jobs currently executing. */
  Collection<Job> getJobsRunning();

  /** Enqueues a job for execution. */
  void submit(Job job);

  /**
   * Removes a not-yet-started job from the waiting queue.
   *
   * @param jobId id of the job to remove
   * @return the removed job, or null when no waiting job has that id
   */
  Job removeFromWaitingQueue(String jobId);

  /** Stops the scheduling loop. */
  void stop();
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/ExecutorFactory.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/ExecutorFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.scheduler;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
*
*/
public class ExecutorFactory {
private static ExecutorFactory _executor;
private static Long _executorLock = new Long(0);
Map<String, ExecutorService> executor = new HashMap<>();
public ExecutorFactory() {
}
public static ExecutorFactory singleton() {
if (_executor == null) {
synchronized (_executorLock) {
if (_executor == null) {
_executor = new ExecutorFactory();
}
}
}
return _executor;
}
public ExecutorService getDefaultExecutor() {
return createOrGet("default");
}
public ExecutorService createOrGet(String name) {
return createOrGet(name, 100);
}
public ExecutorService createOrGet(String name, int numThread) {
synchronized (executor) {
if (!executor.containsKey(name)) {
executor.put(name, Executors.newScheduledThreadPool(numThread));
}
return executor.get(name);
}
}
public void shutdown(String name) {
synchronized (executor) {
if (executor.containsKey(name)) {
ExecutorService e = executor.get(name);
e.shutdown();
executor.remove(name);
}
}
}
public void shutdownAll() {
synchronized (executor) {
for (String name : executor.keySet()){
shutdown(name);
}
}
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/SchedulerListener.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/SchedulerListener.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.scheduler;
/**
* TODO(moon) : add description.
*/
/**
 * Callback interface notified when a scheduler starts or finishes a job.
 * (Interface methods are implicitly public; the redundant modifiers were removed.)
 */
public interface SchedulerListener {
  /** Invoked when {@code scheduler} begins executing {@code job}. */
  void jobStarted(Scheduler scheduler, Job job);

  /** Invoked when {@code scheduler} finishes executing {@code job}. */
  void jobFinished(Scheduler scheduler, Job job);
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/JobProgressPoller.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/JobProgressPoller.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.scheduler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* TODO(moon) : add description.
*/
/**
 * Background thread that periodically reports a running job's progress to its
 * JobListener until {@link #terminate()} is called.
 */
public class JobProgressPoller extends Thread {
  public static final long DEFAULT_INTERVAL_MSEC = 500;
  Logger logger = LoggerFactory.getLogger(JobProgressPoller.class);
  private Job job;
  private long intervalMs; // negative: never poll; zero: use the default interval
  // volatile: terminate() is called from another thread; without it this loop
  // may never observe the flag change and poll forever.
  volatile boolean terminate = false;

  /**
   * @param job job whose progress is polled
   * @param intervalMs polling period in ms; negative disables polling, 0 means
   *                   {@link #DEFAULT_INTERVAL_MSEC}
   */
  public JobProgressPoller(Job job, long intervalMs) {
    this.job = job;
    this.intervalMs = intervalMs;
  }

  @Override
  public void run() {
    if (intervalMs < 0) {
      return;
    } else if (intervalMs == 0) {
      intervalMs = DEFAULT_INTERVAL_MSEC;
    }
    while (!terminate) {
      JobListener listener = job.getListener();
      if (listener != null) {
        try {
          if (job.isRunning()) {
            listener.onProgressUpdate(job, job.progress());
          }
        } catch (Exception e) {
          // Keep polling even if one progress fetch/report fails.
          logger.error("Can not get or update progress", e);
        }
      }
      try {
        Thread.sleep(intervalMs);
      } catch (InterruptedException e) {
        // Restore the interrupt status and stop polling instead of looping on
        // (the previous code swallowed the interrupt and kept running).
        Thread.currentThread().interrupt();
        break;
      }
    }
  }

  /** Requests the polling loop to exit; takes effect within one interval. */
  public void terminate() {
    terminate = true;
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/FIFOScheduler.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/FIFOScheduler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.scheduler;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import org.apache.zeppelin.scheduler.Job.Status;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* FIFOScheduler runs submitted job sequentially
*/
public class FIFOScheduler implements Scheduler {
List<Job> queue = new LinkedList<>();
private ExecutorService executor;
private SchedulerListener listener;
boolean terminate = false;
Job runningJob = null;
private String name;
static Logger LOGGER = LoggerFactory.getLogger(FIFOScheduler.class);
public FIFOScheduler(String name, ExecutorService executor, SchedulerListener listener) {
this.name = name;
this.executor = executor;
this.listener = listener;
}
@Override
public String getName() {
return name;
}
@Override
public Collection<Job> getJobsWaiting() {
List<Job> ret = new LinkedList<>();
synchronized (queue) {
for (Job job : queue) {
ret.add(job);
}
}
return ret;
}
@Override
public Collection<Job> getJobsRunning() {
List<Job> ret = new LinkedList<>();
Job job = runningJob;
if (job != null) {
ret.add(job);
}
return ret;
}
@Override
public void submit(Job job) {
job.setStatus(Status.PENDING);
synchronized (queue) {
queue.add(job);
queue.notify();
}
}
@Override
public Job removeFromWaitingQueue(String jobId) {
synchronized (queue) {
Iterator<Job> it = queue.iterator();
while (it.hasNext()) {
Job job = it.next();
if (job.getId().equals(jobId)) {
it.remove();
return job;
}
}
}
return null;
}
@Override
public void run() {
synchronized (queue) {
while (terminate == false) {
synchronized (queue) {
if (runningJob != null || queue.isEmpty() == true) {
try {
queue.wait(500);
} catch (InterruptedException e) {
LOGGER.error("Exception in FIFOScheduler while run queue.wait", e);
}
continue;
}
runningJob = queue.remove(0);
}
final Scheduler scheduler = this;
this.executor.execute(new Runnable() {
@Override
public void run() {
if (runningJob.isAborted()) {
runningJob.setStatus(Status.ABORT);
runningJob.aborted = false;
synchronized (queue) {
queue.notify();
}
return;
}
runningJob.setStatus(Status.RUNNING);
if (listener != null) {
listener.jobStarted(scheduler, runningJob);
}
runningJob.run();
if (runningJob.isAborted()) {
runningJob.setStatus(Status.ABORT);
} else {
if (runningJob.getException() != null) {
runningJob.setStatus(Status.ERROR);
} else {
runningJob.setStatus(Status.FINISHED);
}
}
if (listener != null) {
listener.jobFinished(scheduler, runningJob);
}
// reset aborted flag to allow retry
runningJob.aborted = false;
runningJob = null;
synchronized (queue) {
queue.notify();
}
}
});
}
}
}
@Override
public void stop() {
terminate = true;
synchronized (queue) {
queue.notify();
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.scheduler;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcess;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* TODO(moon) : add description.
*/
public class SchedulerFactory implements SchedulerListener {
private static final Logger logger = LoggerFactory.getLogger(SchedulerFactory.class);
ExecutorService executor;
Map<String, Scheduler> schedulers = new LinkedHashMap<>();
private static SchedulerFactory singleton;
private static Long singletonLock = new Long(0);
public static SchedulerFactory singleton() {
if (singleton == null) {
synchronized (singletonLock) {
if (singleton == null) {
try {
singleton = new SchedulerFactory();
} catch (Exception e) {
logger.error(e.toString(), e);
}
}
}
}
return singleton;
}
public SchedulerFactory() throws Exception {
executor = ExecutorFactory.singleton().createOrGet("schedulerFactory", 100);
}
public void destroy() {
ExecutorFactory.singleton().shutdown("schedulerFactory");
}
public Scheduler createOrGetFIFOScheduler(String name) {
synchronized (schedulers) {
if (schedulers.containsKey(name) == false) {
Scheduler s = new FIFOScheduler(name, executor, this);
schedulers.put(name, s);
executor.execute(s);
}
return schedulers.get(name);
}
}
public Scheduler createOrGetParallelScheduler(String name, int maxConcurrency) {
synchronized (schedulers) {
if (schedulers.containsKey(name) == false) {
Scheduler s = new ParallelScheduler(name, executor, this, maxConcurrency);
schedulers.put(name, s);
executor.execute(s);
}
return schedulers.get(name);
}
}
public Scheduler createOrGetRemoteScheduler(
String name,
String noteId,
RemoteInterpreterProcess interpreterProcess,
int maxConcurrency) {
synchronized (schedulers) {
if (schedulers.containsKey(name) == false) {
Scheduler s = new RemoteScheduler(
name,
executor,
noteId,
interpreterProcess,
this,
maxConcurrency);
schedulers.put(name, s);
executor.execute(s);
}
return schedulers.get(name);
}
}
public Scheduler removeScheduler(String name) {
synchronized (schedulers) {
Scheduler s = schedulers.remove(name);
if (s != null) {
s.stop();
}
}
return null;
}
public Collection<Scheduler> listScheduler(String name) {
List<Scheduler> s = new LinkedList<>();
synchronized (schedulers) {
for (Scheduler ss : schedulers.values()) {
s.add(ss);
}
}
return s;
}
@Override
public void jobStarted(Scheduler scheduler, Job job) {
logger.info("Job " + job.getJobName() + " started by scheduler " + scheduler.getName());
}
@Override
public void jobFinished(Scheduler scheduler, Job job) {
logger.info("Job " + job.getJobName() + " finished by scheduler " + scheduler.getName());
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.scheduler;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Skeletal implementation of the Job concept.
* - designed for inheritance
* - should be run on a separate thread
* - maintains internal state: it's status
* - supports listeners who are updated on status change
*
* Job class is serialized/deserialized and used server<->client communication
* and saving/loading jobs from disk.
* Changing/adding/deleting non transitive field name need consideration of that.
*/
public abstract class Job {
/**
* Job status.
*
* READY - Job is not running, ready to run.
* PENDING - Job is submitted to scheduler. but not running yet
* RUNNING - Job is running.
* FINISHED - Job finished run. with success
* ERROR - Job finished run. with error
* ABORT - Job finished by abort
*/
public static enum Status {
READY, PENDING, RUNNING, FINISHED, ERROR, ABORT;
public boolean isReady() {
return this == READY;
}
public boolean isRunning() {
return this == RUNNING;
}
public boolean isPending() {
return this == PENDING;
}
}
private String jobName;
String id;
Date dateCreated;
Date dateStarted;
Date dateFinished;
Status status;
static Logger LOGGER = LoggerFactory.getLogger(Job.class);
transient boolean aborted = false;
String errorMessage;
private transient Throwable exception;
private transient JobListener listener;
private long progressUpdateIntervalMs;
public Job(String jobName, JobListener listener, long progressUpdateIntervalMs) {
this.jobName = jobName;
this.listener = listener;
this.progressUpdateIntervalMs = progressUpdateIntervalMs;
dateCreated = new Date();
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMdd-HHmmss");
id = dateFormat.format(dateCreated) + "_" + super.hashCode();
setStatus(Status.READY);
}
public Job(String jobName, JobListener listener) {
this(jobName, listener, JobProgressPoller.DEFAULT_INTERVAL_MSEC);
}
public Job(String jobId, String jobName, JobListener listener) {
this(jobId, jobName, listener, JobProgressPoller.DEFAULT_INTERVAL_MSEC);
}
public Job(String jobId, String jobName, JobListener listener, long progressUpdateIntervalMs) {
this.jobName = jobName;
this.listener = listener;
this.progressUpdateIntervalMs = progressUpdateIntervalMs;
dateCreated = new Date();
id = jobId;
setStatus(Status.READY);
}
public void setId(String id) {
this.id = id;
}
public String getId() {
return id;
}
@Override
public int hashCode() {
return id.hashCode();
}
@Override
public boolean equals(Object o) {
return ((Job) o).hashCode() == hashCode();
}
public Status getStatus() {
return status;
}
public void setStatus(Status status) {
if (this.status == status) {
return;
}
Status before = this.status;
Status after = status;
if (listener != null) {
listener.beforeStatusChange(this, before, after);
}
this.status = status;
if (listener != null) {
listener.afterStatusChange(this, before, after);
}
}
public void setListener(JobListener listener) {
this.listener = listener;
}
public JobListener getListener() {
return listener;
}
public boolean isTerminated() {
return !this.status.isReady() && !this.status.isRunning() && !this.status.isPending();
}
public boolean isRunning() {
return this.status.isRunning();
}
public void run() {
JobProgressPoller progressUpdator = null;
try {
progressUpdator = new JobProgressPoller(this, progressUpdateIntervalMs);
progressUpdator.start();
dateStarted = new Date();
setResult(jobRun());
this.exception = null;
errorMessage = null;
dateFinished = new Date();
progressUpdator.terminate();
} catch (NullPointerException e) {
LOGGER.error("Job failed", e);
progressUpdator.terminate();
this.exception = e;
setResult(e.getMessage());
errorMessage = getStack(e);
dateFinished = new Date();
} catch (Throwable e) {
LOGGER.error("Job failed", e);
progressUpdator.terminate();
this.exception = e;
setResult(e.getMessage());
errorMessage = getStack(e);
dateFinished = new Date();
} finally {
//aborted = false;
}
}
public static String getStack(Throwable e) {
if (e == null) {
return "";
}
Throwable cause = ExceptionUtils.getRootCause(e);
if (cause != null) {
return ExceptionUtils.getFullStackTrace(cause);
} else {
return ExceptionUtils.getFullStackTrace(e);
}
}
public Throwable getException() {
return exception;
}
protected void setException(Throwable t) {
exception = t;
errorMessage = getStack(t);
}
public abstract Object getReturn();
public String getJobName() {
return jobName;
}
public void setJobName(String jobName) {
this.jobName = jobName;
}
public abstract int progress();
public abstract Map<String, Object> info();
protected abstract Object jobRun() throws Throwable;
protected abstract boolean jobAbort();
public void abort() {
aborted = jobAbort();
}
public boolean isAborted() {
return aborted;
}
public Date getDateCreated() {
return dateCreated;
}
public Date getDateStarted() {
return dateStarted;
}
public Date getDateFinished() {
return dateFinished;
}
public abstract void setResult(Object results);
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.scheduler;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import org.apache.zeppelin.scheduler.Job.Status;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Parallel scheduler runs submitted job concurrently.
*/
public class ParallelScheduler implements Scheduler {
List<Job> queue = new LinkedList<>();
List<Job> running = new LinkedList<>();
private ExecutorService executor;
private SchedulerListener listener;
boolean terminate = false;
private String name;
private int maxConcurrency;
static Logger LOGGER = LoggerFactory.getLogger(ParallelScheduler.class);
public ParallelScheduler(String name, ExecutorService executor, SchedulerListener listener,
int maxConcurrency) {
this.name = name;
this.executor = executor;
this.listener = listener;
this.maxConcurrency = maxConcurrency;
}
@Override
public String getName() {
return name;
}
@Override
public Collection<Job> getJobsWaiting() {
List<Job> ret = new LinkedList<>();
synchronized (queue) {
for (Job job : queue) {
ret.add(job);
}
}
return ret;
}
@Override
public Job removeFromWaitingQueue(String jobId) {
synchronized (queue) {
Iterator<Job> it = queue.iterator();
while (it.hasNext()) {
Job job = it.next();
if (job.getId().equals(jobId)) {
it.remove();
return job;
}
}
}
return null;
}
@Override
public Collection<Job> getJobsRunning() {
List<Job> ret = new LinkedList<>();
synchronized (queue) {
for (Job job : running) {
ret.add(job);
}
}
return ret;
}
@Override
public void submit(Job job) {
job.setStatus(Status.PENDING);
synchronized (queue) {
queue.add(job);
queue.notify();
}
}
@Override
public void run() {
while (terminate == false) {
Job job = null;
synchronized (queue) {
if (running.size() >= maxConcurrency || queue.isEmpty() == true) {
try {
queue.wait(500);
} catch (InterruptedException e) {
LOGGER.error("Exception in MockInterpreterAngular while interpret queue.wait", e);
}
continue;
}
job = queue.remove(0);
running.add(job);
}
Scheduler scheduler = this;
executor.execute(new JobRunner(scheduler, job));
}
}
public void setMaxConcurrency(int maxConcurrency) {
this.maxConcurrency = maxConcurrency;
synchronized (queue) {
queue.notify();
}
}
private class JobRunner implements Runnable {
private Scheduler scheduler;
private Job job;
public JobRunner(Scheduler scheduler, Job job) {
this.scheduler = scheduler;
this.job = job;
}
@Override
public void run() {
if (job.isAborted()) {
job.setStatus(Status.ABORT);
job.aborted = false;
synchronized (queue) {
running.remove(job);
queue.notify();
}
return;
}
job.setStatus(Status.RUNNING);
if (listener != null) {
listener.jobStarted(scheduler, job);
}
job.run();
if (job.isAborted()) {
job.setStatus(Status.ABORT);
} else {
if (job.getException() != null) {
job.setStatus(Status.ERROR);
} else {
job.setStatus(Status.FINISHED);
}
}
if (listener != null) {
listener.jobFinished(scheduler, job);
}
// reset aborted flag to allow retry
job.aborted = false;
synchronized (queue) {
running.remove(job);
queue.notify();
}
}
}
@Override
public void stop() {
terminate = true;
synchronized (queue) {
queue.notify();
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.scheduler;
/**
* TODO(moon) : add description.
*/
public interface JobListener {
public void onProgressUpdate(Job job, int progress);
public void beforeStatusChange(Job job, Job.Status before, Job.Status after);
public void afterStatusChange(Job job, Job.Status before, Job.Status after);
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.scheduler;
import org.apache.thrift.TException;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResult.Code;
import org.apache.zeppelin.interpreter.remote.RemoteInterpreterManagedProcess;
import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcess;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
import org.apache.zeppelin.scheduler.Job.Status;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ExecutorService;
/**
* RemoteScheduler runs in ZeppelinServer and proxies Scheduler running on RemoteInterpreter
*/
public class RemoteScheduler implements Scheduler {
Logger logger = LoggerFactory.getLogger(RemoteScheduler.class);
List<Job> queue = new LinkedList<>();
List<Job> running = new LinkedList<>();
private ExecutorService executor;
private SchedulerListener listener;
boolean terminate = false;
private String name;
private int maxConcurrency;
private final String noteId;
private RemoteInterpreterProcess interpreterProcess;
public RemoteScheduler(String name, ExecutorService executor, String noteId,
RemoteInterpreterProcess interpreterProcess, SchedulerListener listener,
int maxConcurrency) {
this.name = name;
this.executor = executor;
this.listener = listener;
this.noteId = noteId;
this.interpreterProcess = interpreterProcess;
this.maxConcurrency = maxConcurrency;
}
@Override
public void run() {
while (terminate == false) {
Job job = null;
synchronized (queue) {
if (running.size() >= maxConcurrency || queue.isEmpty() == true) {
try {
queue.wait(500);
} catch (InterruptedException e) {
logger.error("Exception in RemoteScheduler while run queue.wait", e);
}
continue;
}
job = queue.remove(0);
running.add(job);
}
// run
Scheduler scheduler = this;
JobRunner jobRunner = new JobRunner(scheduler, job);
executor.execute(jobRunner);
// wait until it is submitted to the remote
while (!jobRunner.isJobSubmittedInRemote()) {
synchronized (queue) {
try {
queue.wait(500);
} catch (InterruptedException e) {
logger.error("Exception in RemoteScheduler while jobRunner.isJobSubmittedInRemote " +
"queue.wait", e);
}
}
}
}
}
@Override
public String getName() {
return name;
}
@Override
public Collection<Job> getJobsWaiting() {
List<Job> ret = new LinkedList<>();
synchronized (queue) {
for (Job job : queue) {
ret.add(job);
}
}
return ret;
}
@Override
public Job removeFromWaitingQueue(String jobId) {
synchronized (queue) {
Iterator<Job> it = queue.iterator();
while (it.hasNext()) {
Job job = it.next();
if (job.getId().equals(jobId)) {
it.remove();
return job;
}
}
}
return null;
}
@Override
public Collection<Job> getJobsRunning() {
List<Job> ret = new LinkedList<>();
synchronized (queue) {
for (Job job : running) {
ret.add(job);
}
}
return ret;
}
@Override
public void submit(Job job) {
if (terminate) {
throw new RuntimeException("Scheduler already terminated");
}
job.setStatus(Status.PENDING);
synchronized (queue) {
queue.add(job);
queue.notify();
}
}
public void setMaxConcurrency(int maxConcurrency) {
this.maxConcurrency = maxConcurrency;
synchronized (queue) {
queue.notify();
}
}
/**
* Role of the class is get status info from remote process from PENDING to
* RUNNING status.
*/
private class JobStatusPoller extends Thread {
private long initialPeriodMsec;
private long initialPeriodCheckIntervalMsec;
private long checkIntervalMsec;
private boolean terminate;
private JobListener listener;
private Job job;
Status lastStatus;
public JobStatusPoller(long initialPeriodMsec,
long initialPeriodCheckIntervalMsec, long checkIntervalMsec, Job job,
JobListener listener) {
this.initialPeriodMsec = initialPeriodMsec;
this.initialPeriodCheckIntervalMsec = initialPeriodCheckIntervalMsec;
this.checkIntervalMsec = checkIntervalMsec;
this.job = job;
this.listener = listener;
this.terminate = false;
}
@Override
public void run() {
long started = System.currentTimeMillis();
while (terminate == false) {
long current = System.currentTimeMillis();
long interval;
if (current - started < initialPeriodMsec) {
interval = initialPeriodCheckIntervalMsec;
} else {
interval = checkIntervalMsec;
}
synchronized (this) {
try {
this.wait(interval);
} catch (InterruptedException e) {
logger.error("Exception in RemoteScheduler while run this.wait", e);
}
}
if (terminate) {
// terminated by shutdown
break;
}
Status newStatus = getStatus();
if (newStatus == null) { // unknown
continue;
}
if (newStatus != Status.READY && newStatus != Status.PENDING) {
// we don't need more
break;
}
}
terminate = true;
}
public void shutdown() {
terminate = true;
synchronized (this) {
this.notify();
}
}
private Status getLastStatus() {
if (terminate == true) {
if (lastStatus != Status.FINISHED &&
lastStatus != Status.ERROR &&
lastStatus != Status.ABORT) {
return Status.FINISHED;
} else {
return (lastStatus == null) ? Status.FINISHED : lastStatus;
}
} else {
return (lastStatus == null) ? Status.FINISHED : lastStatus;
}
}
public synchronized Job.Status getStatus() {
if (interpreterProcess.referenceCount() <= 0) {
return getLastStatus();
}
Client client;
try {
client = interpreterProcess.getClient();
} catch (Exception e) {
logger.error("Can't get status information", e);
lastStatus = Status.ERROR;
return Status.ERROR;
}
boolean broken = false;
try {
String statusStr = client.getStatus(noteId, job.getId());
if ("Unknown".equals(statusStr)) {
// not found this job in the remote schedulers.
// maybe not submitted, maybe already finished
//Status status = getLastStatus();
listener.afterStatusChange(job, null, null);
return job.getStatus();
}
Status status = Status.valueOf(statusStr);
lastStatus = status;
listener.afterStatusChange(job, null, status);
return status;
} catch (TException e) {
broken = true;
logger.error("Can't get status information", e);
lastStatus = Status.ERROR;
return Status.ERROR;
} catch (Exception e) {
logger.error("Unknown status", e);
lastStatus = Status.ERROR;
return Status.ERROR;
} finally {
interpreterProcess.releaseClient(client, broken);
}
}
}
private class JobRunner implements Runnable, JobListener {
private Scheduler scheduler;
private Job job;
private boolean jobExecuted;
boolean jobSubmittedRemotely;
public JobRunner(Scheduler scheduler, Job job) {
this.scheduler = scheduler;
this.job = job;
jobExecuted = false;
jobSubmittedRemotely = false;
}
public boolean isJobSubmittedInRemote() {
return jobSubmittedRemotely;
}
@Override
public void run() {
if (job.isAborted()) {
job.setStatus(Status.ABORT);
job.aborted = false;
synchronized (queue) {
running.remove(job);
queue.notify();
}
jobSubmittedRemotely = true;
return;
}
JobStatusPoller jobStatusPoller = new JobStatusPoller(1500, 100, 500,
job, this);
jobStatusPoller.start();
if (listener != null) {
listener.jobStarted(scheduler, job);
}
job.run();
jobExecuted = true;
jobSubmittedRemotely = true;
jobStatusPoller.shutdown();
try {
jobStatusPoller.join();
} catch (InterruptedException e) {
logger.error("JobStatusPoller interrupted", e);
}
// set job status based on result.
Status lastStatus = jobStatusPoller.getStatus();
Object jobResult = job.getReturn();
if (jobResult != null && jobResult instanceof InterpreterResult) {
if (((InterpreterResult) jobResult).code() == Code.ERROR) {
lastStatus = Status.ERROR;
}
}
if (job.getException() != null) {
lastStatus = Status.ERROR;
}
job.setStatus(lastStatus);
if (listener != null) {
listener.jobFinished(scheduler, job);
}
// reset aborted flag to allow retry
job.aborted = false;
synchronized (queue) {
running.remove(job);
queue.notify();
}
}
@Override
public void onProgressUpdate(Job job, int progress) {
}
@Override
public void beforeStatusChange(Job job, Status before, Status after) {
}
@Override
public void afterStatusChange(Job job, Status before, Status after) {
if (after == null) { // unknown. maybe before sumitted remotely, maybe already finished.
if (jobExecuted) {
jobSubmittedRemotely = true;
Object jobResult = job.getReturn();
if (job.isAborted()) {
job.setStatus(Status.ABORT);
} else if (job.getException() != null) {
job.setStatus(Status.ERROR);
} else if (jobResult != null && jobResult instanceof InterpreterResult
&& ((InterpreterResult) jobResult).code() == Code.ERROR) {
job.setStatus(Status.ERROR);
} else {
job.setStatus(Status.FINISHED);
}
}
return;
}
// Update remoteStatus
if (jobExecuted == false) {
if (after == Status.FINISHED || after == Status.ABORT
|| after == Status.ERROR) {
// it can be status of last run.
// so not updating the remoteStatus
return;
} else if (after == Status.RUNNING) {
jobSubmittedRemotely = true;
}
} else {
jobSubmittedRemotely = true;
}
// status polled by status poller
if (job.getStatus() != after) {
job.setStatus(after);
}
}
}
@Override
public void stop() {
terminate = true;
synchronized (queue) {
queue.notify();
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.dep;
import org.apache.maven.repository.internal.DefaultServiceLocator;
import org.apache.maven.wagon.Wagon;
import org.apache.maven.wagon.providers.http.HttpWagon;
import org.apache.maven.wagon.providers.http.LightweightHttpWagon;
import org.sonatype.aether.RepositorySystem;
import org.sonatype.aether.connector.file.FileRepositoryConnectorFactory;
import org.sonatype.aether.connector.wagon.WagonProvider;
import org.sonatype.aether.connector.wagon.WagonRepositoryConnectorFactory;
import org.sonatype.aether.spi.connector.RepositoryConnectorFactory;
/**
* Get maven repository instance.
*/
public class RepositorySystemFactory {
public static RepositorySystem newRepositorySystem() {
DefaultServiceLocator locator = new DefaultServiceLocator();
locator.addService(RepositoryConnectorFactory.class, FileRepositoryConnectorFactory.class);
locator.addService(RepositoryConnectorFactory.class, WagonRepositoryConnectorFactory.class);
locator.setServices(WagonProvider.class, new ManualWagonProvider());
return locator.getService(RepositorySystem.class);
}
/**
* ManualWagonProvider
*/
public static class ManualWagonProvider implements WagonProvider {
@Override
public Wagon lookup(String roleHint) throws Exception {
if ("http".equals(roleHint)) {
return new LightweightHttpWagon();
}
if ("https".equals(roleHint)) {
return new HttpWagon();
}
return null;
}
@Override
public void release(Wagon arg0) {
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.dep;
import java.io.PrintStream;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sonatype.aether.transfer.AbstractTransferListener;
import org.sonatype.aether.transfer.TransferEvent;
import org.sonatype.aether.transfer.TransferResource;
/**
 * Transfer listener that logs dependency download/upload progress.
 */
public class TransferListener extends AbstractTransferListener {
  Logger logger = LoggerFactory.getLogger(TransferListener.class);

  // Bytes transferred so far, keyed by in-flight resource.
  private Map<TransferResource, Long> downloads = new ConcurrentHashMap<>();

  // Length of the previously rendered progress line, used to blank leftovers.
  private int lastLength;

  public TransferListener() {}

  @Override
  public void transferInitiated(TransferEvent event) {
    String message =
        event.getRequestType() == TransferEvent.RequestType.PUT ? "Uploading" : "Downloading";
    logger.info(message + ": " + event.getResource().getRepositoryUrl()
        + event.getResource().getResourceName());
  }

  /** Logs one combined progress line covering every transfer still running. */
  @Override
  public void transferProgressed(TransferEvent event) {
    TransferResource resource = event.getResource();
    downloads.put(resource, event.getTransferredBytes());

    StringBuilder buffer = new StringBuilder(64);
    for (Map.Entry<TransferResource, Long> entry : downloads.entrySet()) {
      long total = entry.getKey().getContentLength();
      long complete = entry.getValue().longValue();
      buffer.append(getStatus(complete, total)).append(" ");
    }

    // Pad with spaces so a shorter line fully overwrites the previous one.
    int pad = lastLength - buffer.length();
    lastLength = buffer.length();
    pad(buffer, pad);
    buffer.append('\r');
    logger.info(buffer.toString());
  }

  /** Formats "complete/total", switching to KB once sizes reach 1 KB. */
  private String getStatus(long complete, long total) {
    if (total >= 1024) {
      return toKB(complete) + "/" + toKB(total) + " KB ";
    } else if (total >= 0) {
      return complete + "/" + total + " B ";
    } else if (complete >= 1024) {
      // Content length unknown (negative): report progress only.
      return toKB(complete) + " KB ";
    } else {
      return complete + " B ";
    }
  }

  private void pad(StringBuilder buffer, int spaces) {
    String block = " ";
    while (spaces > 0) {
      int n = Math.min(spaces, block.length());
      buffer.append(block, 0, n);
      spaces -= n;
    }
  }

  @Override
  public void transferSucceeded(TransferEvent event) {
    transferCompleted(event);

    TransferResource resource = event.getResource();
    long contentLength = event.getTransferredBytes();
    if (contentLength >= 0) {
      String type =
          (event.getRequestType() == TransferEvent.RequestType.PUT ? "Uploaded" : "Downloaded");
      String len = contentLength >= 1024 ? toKB(contentLength) + " KB" : contentLength + " B";

      String throughput = "";
      long duration = System.currentTimeMillis() - resource.getTransferStartTime();
      if (duration > 0) {
        DecimalFormat format = new DecimalFormat("0.0", new DecimalFormatSymbols(Locale.ENGLISH));
        double kbPerSec = (contentLength / 1024.0) / (duration / 1000.0);
        throughput = " at " + format.format(kbPerSec) + " KB/sec";
      }

      logger.info(type + ": " + resource.getRepositoryUrl() + resource.getResourceName() + " ("
          + len + throughput + ")");
    }
  }

  @Override
  public void transferFailed(TransferEvent event) {
    transferCompleted(event);
    // The previous implementation wrote the stack trace to a PrintStream
    // field that was never initialized, throwing an NPE here; log instead.
    logger.error("Transfer failed", event.getException());
  }

  private void transferCompleted(TransferEvent event) {
    downloads.remove(event.getResource());

    // Blank out the progress line that was rendered for this transfer.
    StringBuilder buffer = new StringBuilder(64);
    pad(buffer, lastLength);
    buffer.append('\r');
    logger.info(buffer.toString());
  }

  @Override
  public void transferCorrupted(TransferEvent event) {
    // Same uninitialized-PrintStream NPE hazard as transferFailed; log instead.
    logger.error("Transfer corrupted", event.getException());
  }

  /** Rounds bytes up to whole kilobytes. */
  protected long toKB(long bytes) {
    return (bytes + 1023) / 1024;
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/Repository.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/Repository.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.dep;
import static org.apache.commons.lang.StringUtils.isNotBlank;
import org.sonatype.aether.repository.Authentication;
import org.sonatype.aether.repository.Proxy;
/**
 * Remote repository declaration: id, url, optional credentials and proxy,
 * configured through a fluent builder-style API.
 */
public class Repository {
  private boolean snapshot = false;
  private String id;
  private String url;
  private String username = null;
  private String password = null;
  private String proxyProtocol = "HTTP";
  private String proxyHost = null;
  private Integer proxyPort = null;
  private String proxyLogin = null;
  private String proxyPassword = null;

  public Repository(String id) {
    this.id = id;
  }

  public Repository url(String url) {
    this.url = url;
    return this;
  }

  /** Marks this repository as serving snapshot versions. */
  public Repository snapshot() {
    snapshot = true;
    return this;
  }

  public boolean isSnapshot() {
    return snapshot;
  }

  public String getId() {
    return id;
  }

  public String getUrl() {
    return url;
  }

  public Repository username(String username) {
    this.username = username;
    return this;
  }

  public Repository password(String password) {
    this.password = password;
    return this;
  }

  /** Convenience for setting both credentials at once. */
  public Repository credentials(String username, String password) {
    return username(username).password(password);
  }

  /**
   * @return credentials for this repository, or null when either the
   *         username or the password is unset
   */
  public Authentication getAuthentication() {
    if (this.username == null || this.password == null) {
      return null;
    }
    return new Authentication(this.username, this.password);
  }

  /**
   * @return proxy settings, or null when no proxy host/port is configured;
   *         proxy credentials are attached only when a login is present
   */
  public Proxy getProxy() {
    if (!isNotBlank(proxyHost) || proxyPort == null) {
      return null;
    }
    Authentication proxyAuth =
        isNotBlank(proxyLogin) ? new Authentication(proxyLogin, proxyPassword) : null;
    return new Proxy(proxyProtocol, proxyHost, proxyPort, proxyAuth);
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/DependencyResolver.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/DependencyResolver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.dep;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sonatype.aether.RepositoryException;
import org.sonatype.aether.artifact.Artifact;
import org.sonatype.aether.collection.CollectRequest;
import org.sonatype.aether.collection.DependencyCollectionException;
import org.sonatype.aether.graph.Dependency;
import org.sonatype.aether.graph.DependencyFilter;
import org.sonatype.aether.repository.RemoteRepository;
import org.sonatype.aether.resolution.ArtifactResult;
import org.sonatype.aether.resolution.DependencyRequest;
import org.sonatype.aether.util.artifact.DefaultArtifact;
import org.sonatype.aether.util.artifact.JavaScopes;
import org.sonatype.aether.util.filter.DependencyFilterUtils;
import org.sonatype.aether.util.filter.PatternExclusionsDependencyFilter;
import org.sonatype.aether.util.graph.DefaultDependencyNode;
/**
 * Dependency resolver that adds new dependencies from a maven repository
 * (at runtime) to Zeppelin.
 */
public class DependencyResolver extends AbstractDependencyResolver {
  Logger logger = LoggerFactory.getLogger(DependencyResolver.class);

  // Zeppelin's own artifacts are always excluded so loaded jars cannot
  // shadow the running classes.
  private final String[] exclusions = new String[] {"org.apache.zeppelin:zeppelin-zengine",
      "org.apache.zeppelin:zeppelin-interpreter",
      "org.apache.zeppelin:zeppelin-server"};

  public DependencyResolver(String localRepoPath) {
    super(localRepoPath);
  }

  /**
   * Resolves an artifact (maven coordinate or local file path) with no
   * extra exclusions.
   */
  public List<File> load(String artifact)
      throws RepositoryException, IOException {
    return load(artifact, new LinkedList<String>());
  }

  /**
   * Resolves an artifact and its transitive dependencies.
   *
   * @param artifact maven coordinate of the form
   *        {@code groupId:artifactId[:extension[:classifier]]:version},
   *        or a local filesystem path
   * @param excludes {@code groupId:artifactId} patterns to skip
   * @return resolved jar files; empty list when artifact is blank
   */
  public synchronized List<File> load(String artifact, Collection<String> excludes)
      throws RepositoryException, IOException {
    if (StringUtils.isBlank(artifact)) {
      // Skip dependency loading if artifact is empty
      return new LinkedList<>();
    }

    // A maven coordinate has 3..6 colon-separated parts; anything else is
    // treated as a local filesystem path.
    int numSplits = artifact.split(":").length;
    if (numSplits >= 3 && numSplits <= 6) {
      return loadFromMvn(artifact, excludes);
    } else {
      LinkedList<File> libs = new LinkedList<>();
      libs.add(new File(artifact));
      return libs;
    }
  }

  public List<File> load(String artifact, File destPath) throws IOException, RepositoryException {
    return load(artifact, new LinkedList<String>(), destPath);
  }

  /**
   * Resolves an artifact and copies the resulting jars into destPath,
   * skipping files whose content is already identical.
   */
  public List<File> load(String artifact, Collection<String> excludes, File destPath)
      throws RepositoryException, IOException {
    List<File> libs = new LinkedList<>();

    if (StringUtils.isNotBlank(artifact)) {
      libs = load(artifact, excludes);

      for (File srcFile : libs) {
        File destFile = new File(destPath, srcFile.getName());
        if (!destFile.exists() || !FileUtils.contentEquals(srcFile, destFile)) {
          FileUtils.copyFile(srcFile, destFile);
          logger.debug("copy {} to {}", srcFile.getAbsolutePath(), destPath);
        }
      }
    }
    return libs;
  }

  /** Copies one local file into destPath unless an identical copy already exists. */
  public synchronized void copyLocalDependency(String srcPath, File destPath)
      throws IOException {
    if (StringUtils.isBlank(srcPath)) {
      return;
    }

    File srcFile = new File(srcPath);
    File destFile = new File(destPath, srcFile.getName());

    if (!destFile.exists() || !FileUtils.contentEquals(srcFile, destFile)) {
      FileUtils.copyFile(srcFile, destFile);
      logger.debug("copy {} to {}", srcFile.getAbsolutePath(), destPath);
    }
  }

  private List<File> loadFromMvn(String artifact, Collection<String> excludes)
      throws RepositoryException {
    Collection<String> allExclusions = new LinkedList<>();
    allExclusions.addAll(excludes);
    allExclusions.addAll(Arrays.asList(exclusions));

    List<ArtifactResult> listOfArtifact;
    listOfArtifact = getArtifactsWithDep(artifact, allExclusions);

    // Drop any resolved artifact whose groupId:artifactId:version starts
    // with one of the exclusion patterns.
    Iterator<ArtifactResult> it = listOfArtifact.iterator();
    while (it.hasNext()) {
      Artifact a = it.next().getArtifact();
      String gav = a.getGroupId() + ":" + a.getArtifactId() + ":" + a.getVersion();
      for (String exclude : allExclusions) {
        if (gav.startsWith(exclude)) {
          it.remove();
          break;
        }
      }
    }

    List<File> files = new LinkedList<>();
    for (ArtifactResult artifactResult : listOfArtifact) {
      files.add(artifactResult.getArtifact().getFile());
      logger.debug("load {}", artifactResult.getArtifact().getFile().getAbsolutePath());
    }
    return files;
  }

  /**
   * @param dependency maven coordinate to resolve
   * @param excludes list of pattern can either be of the form groupId:artifactId
   * @return resolved artifacts including transitive compile-scope dependencies
   * @throws RepositoryException when resolution fails
   */
  @Override
  public List<ArtifactResult> getArtifactsWithDep(String dependency,
      Collection<String> excludes) throws RepositoryException {
    Artifact artifact = new DefaultArtifact(dependency);
    DependencyFilter classpathFilter = DependencyFilterUtils.classpathFilter(JavaScopes.COMPILE);
    PatternExclusionsDependencyFilter exclusionFilter =
        new PatternExclusionsDependencyFilter(excludes);

    CollectRequest collectRequest = new CollectRequest();
    collectRequest.setRoot(new Dependency(artifact, JavaScopes.COMPILE));

    synchronized (repos) {
      for (RemoteRepository repo : repos) {
        collectRequest.addRepository(repo);
      }
    }
    DependencyRequest dependencyRequest = new DependencyRequest(collectRequest,
        DependencyFilterUtils.andFilter(exclusionFilter, classpathFilter));
    try {
      return system.resolveDependencies(session, dependencyRequest).getArtifactResults();
    } catch (NullPointerException ex) {
      // Aether can surface a bare NPE when an artifact cannot be located.
      // Rewrap it, but keep the original exception as the cause instead of
      // dropping it.
      throw new RepositoryException(
          String.format("Cannot fetch dependencies for %s", dependency), ex);
    }
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/DependencyContext.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/DependencyContext.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.dep;
import java.io.File;
import java.net.MalformedURLException;
import java.util.LinkedList;
import java.util.List;
import org.sonatype.aether.RepositorySystem;
import org.sonatype.aether.RepositorySystemSession;
import org.sonatype.aether.artifact.Artifact;
import org.sonatype.aether.collection.CollectRequest;
import org.sonatype.aether.graph.DependencyFilter;
import org.sonatype.aether.repository.RemoteRepository;
import org.sonatype.aether.resolution.ArtifactResolutionException;
import org.sonatype.aether.resolution.ArtifactResult;
import org.sonatype.aether.resolution.DependencyRequest;
import org.sonatype.aether.resolution.DependencyResolutionException;
import org.sonatype.aether.util.artifact.DefaultArtifact;
import org.sonatype.aether.util.artifact.JavaScopes;
import org.sonatype.aether.util.filter.DependencyFilterUtils;
import org.sonatype.aether.util.filter.PatternExclusionsDependencyFilter;
/**
 * Collects dependency and repository declarations and fetches the matching
 * artifacts (plus transitive compile-scope dependencies) through Aether.
 */
public class DependencyContext {
  List<Dependency> dependencies = new LinkedList<>();
  List<Repository> repositories = new LinkedList<>();

  // All fetched files, and the subset flagged for distribution (isDist()).
  List<File> files = new LinkedList<>();
  List<File> filesDist = new LinkedList<>();
  private RepositorySystem system = Booter.newRepositorySystem();
  private RepositorySystemSession session;
  private RemoteRepository mavenCentral = Booter.newCentralRepository();
  private RemoteRepository mavenLocal = Booter.newLocalRepository();

  public DependencyContext(String localRepoPath) {
    session = Booter.newRepositorySystemSession(system, localRepoPath);
  }

  /**
   * Registers a dependency; re-registering an equal one moves it to the
   * end of the list.
   */
  public Dependency load(String lib) {
    Dependency dep = new Dependency(lib);
    // remove() is a no-op when the element is absent, so no contains()
    // check is needed first.
    dependencies.remove(dep);
    dependencies.add(dep);
    return dep;
  }

  public Repository addRepo(String name) {
    Repository rep = new Repository(name);
    repositories.add(rep);
    return rep;
  }

  /** Clears all registered dependencies, repositories and fetched files. */
  public void reset() {
    dependencies = new LinkedList<>();
    repositories = new LinkedList<>();

    files = new LinkedList<>();
    filesDist = new LinkedList<>();
  }

  /**
   * Fetches all registered artifacts. Local filesystem artifacts are added
   * as-is; maven coordinates are resolved with their transitive deps.
   *
   * @return every fetched file (also retrievable later via getFiles())
   * @throws MalformedURLException
   * @throws ArtifactResolutionException
   * @throws DependencyResolutionException
   */
  public List<File> fetch() throws MalformedURLException,
      DependencyResolutionException, ArtifactResolutionException {

    for (Dependency dep : dependencies) {
      if (!dep.isLocalFsArtifact()) {
        List<ArtifactResult> artifacts = fetchArtifactWithDep(dep);
        for (ArtifactResult artifact : artifacts) {
          if (dep.isDist()) {
            filesDist.add(artifact.getArtifact().getFile());
          }
          files.add(artifact.getArtifact().getFile());
        }
      } else {
        // Local files are used directly, without resolution.
        if (dep.isDist()) {
          filesDist.add(new File(dep.getGroupArtifactVersion()));
        }
        files.add(new File(dep.getGroupArtifactVersion()));
      }
    }

    return files;
  }

  /** Resolves one maven dependency against central, local and custom repos. */
  private List<ArtifactResult> fetchArtifactWithDep(Dependency dep)
      throws DependencyResolutionException, ArtifactResolutionException {
    Artifact artifact = new DefaultArtifact(dep.getGroupArtifactVersion());

    DependencyFilter classpathFilter = DependencyFilterUtils
        .classpathFilter(JavaScopes.COMPILE);
    PatternExclusionsDependencyFilter exclusionFilter = new PatternExclusionsDependencyFilter(
        dep.getExclusions());

    CollectRequest collectRequest = new CollectRequest();
    collectRequest.setRoot(new org.sonatype.aether.graph.Dependency(artifact,
        JavaScopes.COMPILE));

    collectRequest.addRepository(mavenCentral);
    collectRequest.addRepository(mavenLocal);
    for (Repository repo : repositories) {
      RemoteRepository rr = new RemoteRepository(repo.getId(), "default", repo.getUrl());
      rr.setPolicy(repo.isSnapshot(), null);
      collectRequest.addRepository(rr);
    }

    DependencyRequest dependencyRequest = new DependencyRequest(collectRequest,
        DependencyFilterUtils.andFilter(exclusionFilter, classpathFilter));
    return system.resolveDependencies(session, dependencyRequest).getArtifactResults();
  }

  public List<File> getFiles() {
    return files;
  }

  public List<File> getFilesDist() {
    return filesDist;
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/RepositoryListener.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/RepositoryListener.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.dep;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sonatype.aether.AbstractRepositoryListener;
import org.sonatype.aether.RepositoryEvent;
/**
 * Simple repository listener that logs repository events.
 *
 * <p>Uses SLF4J parameterized logging so message strings are only built
 * when the INFO level is actually enabled.
 */
public class RepositoryListener extends AbstractRepositoryListener {
  Logger logger = LoggerFactory.getLogger(RepositoryListener.class);

  public RepositoryListener() {}

  @Override
  public void artifactDeployed(RepositoryEvent event) {
    logger.info("Deployed {} to {}", event.getArtifact(), event.getRepository());
  }

  @Override
  public void artifactDeploying(RepositoryEvent event) {
    logger.info("Deploying {} to {}", event.getArtifact(), event.getRepository());
  }

  @Override
  public void artifactDescriptorInvalid(RepositoryEvent event) {
    logger.info("Invalid artifact descriptor for {}: {}", event.getArtifact(),
        event.getException().getMessage());
  }

  @Override
  public void artifactDescriptorMissing(RepositoryEvent event) {
    logger.info("Missing artifact descriptor for {}", event.getArtifact());
  }

  @Override
  public void artifactInstalled(RepositoryEvent event) {
    logger.info("Installed {} to {}", event.getArtifact(), event.getFile());
  }

  @Override
  public void artifactInstalling(RepositoryEvent event) {
    logger.info("Installing {} to {}", event.getArtifact(), event.getFile());
  }

  @Override
  public void artifactResolved(RepositoryEvent event) {
    logger.info("Resolved artifact {} from {}", event.getArtifact(), event.getRepository());
  }

  @Override
  public void artifactDownloading(RepositoryEvent event) {
    logger.info("Downloading artifact {} from {}", event.getArtifact(), event.getRepository());
  }

  @Override
  public void artifactDownloaded(RepositoryEvent event) {
    logger.info("Downloaded artifact {} from {}", event.getArtifact(), event.getRepository());
  }

  @Override
  public void artifactResolving(RepositoryEvent event) {
    logger.info("Resolving artifact {}", event.getArtifact());
  }

  @Override
  public void metadataDeployed(RepositoryEvent event) {
    logger.info("Deployed {} to {}", event.getMetadata(), event.getRepository());
  }

  @Override
  public void metadataDeploying(RepositoryEvent event) {
    logger.info("Deploying {} to {}", event.getMetadata(), event.getRepository());
  }

  @Override
  public void metadataInstalled(RepositoryEvent event) {
    logger.info("Installed {} to {}", event.getMetadata(), event.getFile());
  }

  @Override
  public void metadataInstalling(RepositoryEvent event) {
    logger.info("Installing {} to {}", event.getMetadata(), event.getFile());
  }

  @Override
  public void metadataInvalid(RepositoryEvent event) {
    logger.info("Invalid metadata {}", event.getMetadata());
  }

  @Override
  public void metadataResolved(RepositoryEvent event) {
    logger.info("Resolved metadata {} from {}", event.getMetadata(), event.getRepository());
  }

  @Override
  public void metadataResolving(RepositoryEvent event) {
    logger.info("Resolving metadata {} from {}", event.getMetadata(), event.getRepository());
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/Booter.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/Booter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.dep;
import org.apache.commons.lang.Validate;
import org.apache.maven.repository.internal.MavenRepositorySystemSession;
import org.sonatype.aether.RepositorySystem;
import org.sonatype.aether.RepositorySystemSession;
import org.sonatype.aether.repository.LocalRepository;
import org.sonatype.aether.repository.RemoteRepository;
import java.nio.file.Paths;
/**
 * Manage mvn repository: repository-system bootstrap, session creation and
 * the default central/local remote repositories.
 */
public class Booter {
  public static RepositorySystem newRepositorySystem() {
    return RepositorySystemFactory.newRepositorySystem();
  }

  /**
   * Creates a session backed by a local repository resolved relative to the
   * Zeppelin home directory.
   */
  public static RepositorySystemSession newRepositorySystemSession(
      RepositorySystem system, String localRepoPath) {
    Validate.notNull(localRepoPath, "localRepoPath should have a value");

    MavenRepositorySystemSession session = new MavenRepositorySystemSession();
    LocalRepository localRepo = new LocalRepository(resolveLocalRepoPath(localRepoPath));
    session.setLocalRepositoryManager(system.newLocalRepositoryManager(localRepo));
    return session;
  }

  /**
   * Resolves localRepoPath against the Zeppelin home: the ZEPPELIN_HOME
   * environment variable, then the zeppelin.home system property, then "..".
   */
  static String resolveLocalRepoPath(String localRepoPath) {
    // todo decouple home folder resolution
    String home = System.getenv("ZEPPELIN_HOME");
    if (home == null) {
      home = System.getProperty("zeppelin.home");
    }
    if (home == null) {
      home = "..";
    }

    return Paths.get(home).resolve(localRepoPath).toAbsolutePath().toString();
  }

  /**
   * Maven central repository. The URL can be overridden via the
   * ZEPPELIN_INTERPRETER_DEP_MVNREPO environment variable or the
   * zeppelin.interpreter.dep.mvnRepo system property.
   */
  public static RemoteRepository newCentralRepository() {
    String mvnRepo = System.getenv("ZEPPELIN_INTERPRETER_DEP_MVNREPO");
    if (mvnRepo == null) {
      mvnRepo = System.getProperty("zeppelin.interpreter.dep.mvnRepo");
    }
    if (mvnRepo == null) {
      // Maven Central requires HTTPS since January 2020; the old plain-http
      // URL now returns HTTP 501 for every request.
      mvnRepo = "https://repo1.maven.org/maven2/";
    }

    return new RemoteRepository("central", "default", mvnRepo);
  }

  /** The user's local ~/.m2 repository, exposed as a file:// remote. */
  public static RemoteRepository newLocalRepository() {
    return new RemoteRepository("local",
        "default", "file://" + System.getProperty("user.home") + "/.m2/repository");
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/AbstractDependencyResolver.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/AbstractDependencyResolver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.dep;
import java.net.URL;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import org.sonatype.aether.RepositorySystem;
import org.sonatype.aether.RepositorySystemSession;
import org.sonatype.aether.repository.Authentication;
import org.sonatype.aether.repository.Proxy;
import org.sonatype.aether.repository.RemoteRepository;
import org.sonatype.aether.repository.RepositoryPolicy;
import org.sonatype.aether.resolution.ArtifactResult;
/**
 * Base class for dependency resolvers: maintains the list of remote
 * repositories (maven central and the local ~/.m2 by default) and the
 * Aether session used for resolution.
 */
public abstract class AbstractDependencyResolver {
  protected RepositorySystem system = Booter.newRepositorySystem();
  protected List<RemoteRepository> repos = new LinkedList<>();
  protected RepositorySystemSession session;

  public AbstractDependencyResolver(String localRepoPath) {
    session = Booter.newRepositorySystemSession(system, localRepoPath);
    repos.add(Booter.newCentralRepository()); // add maven central
    repos.add(Booter.newLocalRepository());
  }

  /** Applies the given proxy (with credentials) to every known repository. */
  public void setProxy(URL proxyUrl, String proxyUser, String proxyPassword) {
    Proxy proxy = new Proxy(proxyUrl.getProtocol(), proxyUrl.getHost(), proxyUrl.getPort(),
        new Authentication(proxyUser, proxyPassword));
    synchronized (repos) {
      for (RemoteRepository repository : repos) {
        repository.setProxy(proxy);
      }
    }
  }

  public List<RemoteRepository> getRepos() {
    return this.repos;
  }

  /** Adds a repository, replacing any existing one with the same id. */
  public void addRepo(String id, String url, boolean snapshot) {
    synchronized (repos) {
      delRepo(id);
      repos.add(createRepository(id, url, snapshot));
    }
  }

  /** Adds an authenticated/proxied repository, replacing any with the same id. */
  public void addRepo(String id, String url, boolean snapshot, Authentication auth, Proxy proxy) {
    synchronized (repos) {
      delRepo(id);
      RemoteRepository repository = createRepository(id, url, snapshot);
      repository.setAuthentication(auth);
      repository.setProxy(proxy);
      repos.add(repository);
    }
  }

  // Shared construction for both addRepo overloads: daily-updating policy
  // that warns on checksum mismatches.
  private RemoteRepository createRepository(String id, String url, boolean snapshot) {
    RemoteRepository repository = new RemoteRepository(id, "default", url);
    repository.setPolicy(snapshot, new RepositoryPolicy(
        true,
        RepositoryPolicy.UPDATE_POLICY_DAILY,
        RepositoryPolicy.CHECKSUM_POLICY_WARN));
    return repository;
  }

  /**
   * Removes the repository with the given id.
   *
   * @return the removed repository, or null when no repository matched
   */
  public RemoteRepository delRepo(String id) {
    synchronized (repos) {
      for (Iterator<RemoteRepository> it = repos.iterator(); it.hasNext(); ) {
        RemoteRepository repository = it.next();
        if (repository.getId().equals(id)) {
          it.remove();
          return repository;
        }
      }
    }
    return null;
  }

  public abstract List<ArtifactResult> getArtifactsWithDep(String dependency,
      Collection<String> excludes) throws Exception;
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/Dependency.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/Dependency.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.dep;
import java.util.LinkedList;
import java.util.List;
/**
*
*/
/**
 * A maven-style dependency specification ("groupId:artifactId:version") plus
 * exclusion rules, used when resolving artifacts for an interpreter.
 */
public class Dependency {
  private String groupArtifactVersion;
  // When true the artifact is kept local only (not distributed, e.g. no sc.addJar()).
  private boolean local = false;
  private List<String> exclusions;

  /**
   * @param groupArtifactVersion "groupId:artifactId:version" coordinate
   *        (optionally with classifier/extension segments) or a local filesystem path
   */
  public Dependency(String groupArtifactVersion) {
    this.groupArtifactVersion = groupArtifactVersion;
    exclusions = new LinkedList<>();
  }

  @Override
  public boolean equals(Object o) {
    if (!(o instanceof Dependency)) {
      return false;
    } else {
      // Identity is defined solely by the coordinate string; 'local' and
      // exclusions are intentionally ignored.
      return ((Dependency) o).groupArtifactVersion.equals(groupArtifactVersion);
    }
  }

  /**
   * Kept consistent with {@link #equals(Object)}, which compares only
   * groupArtifactVersion. Previously missing, which broke the
   * equals/hashCode contract for hash-based collections.
   */
  @Override
  public int hashCode() {
    return groupArtifactVersion == null ? 0 : groupArtifactVersion.hashCode();
  }

  /**
   * Don't add artifact into SparkContext (sc.addJar())
   * @return this, for chaining
   */
  public Dependency local() {
    local = true;
    return this;
  }

  /** Exclude every transitive dependency. */
  public Dependency excludeAll() {
    exclude("*");
    return this;
  }

  /**
   * Add exclusion patterns.
   *
   * @param exclusions comma or newline separated list of "groupId:ArtifactId"
   * @return this, for chaining
   */
  public Dependency exclude(String exclusions) {
    for (String item : exclusions.split(",|\n")) {
      this.exclusions.add(item);
    }
    return this;
  }

  public String getGroupArtifactVersion() {
    return groupArtifactVersion;
  }

  /** @return true when the artifact should be distributed (i.e. not marked local). */
  public boolean isDist() {
    return !local;
  }

  public List<String> getExclusions() {
    return exclusions;
  }

  /**
   * @return true when the spec does not look like a maven coordinate
   *         (3-6 colon-separated segments) and is therefore treated as a
   *         local filesystem artifact.
   */
  public boolean isLocalFsArtifact() {
    int numSplits = groupArtifactVersion.split(":").length;
    return !(numSplits >= 3 && numSplits <= 6);
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/annotation/Experimental.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/annotation/Experimental.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Experimental API
* Might change or be removed at anytime, or be adopted as ZeppelinApi
*/
// Retained at runtime so tools and frameworks can detect experimental API usage reflectively.
@Retention(RetentionPolicy.RUNTIME)
// Usable on nearly every declaration site: types, members, parameters, locals, packages.
@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER,
    ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE})
public @interface Experimental {
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/annotation/ZeppelinApi.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/annotation/ZeppelinApi.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* APIs exposed to extends pluggable components or exposed to enduser
*/
// Retained at runtime so the public-API surface can be inspected reflectively.
@Retention(RetentionPolicy.RUNTIME)
// Usable on nearly every declaration site: types, members, parameters, locals, packages.
@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER,
    ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE})
public @interface ZeppelinApi {
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterHookRegistry.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterHookRegistry.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
import java.util.HashMap;
import java.util.Map;
/**
* The InterpreterinterpreterHookRegistry specifies code to be conditionally executed by an
* interpreter. The constants defined in this class denote currently
* supported events. Each instance is bound to a single InterpreterGroup.
* Scope is determined on a per-note basis (except when null for global scope).
*/
/**
 * Registry of hook code to be conditionally executed by an interpreter.
 * Hooks are keyed by note id, interpreter class name, and event name; the
 * supported events are the constants defined in {@link HookType}. Scope is
 * per-note, with {@link #GLOBAL_KEY} used when the note id is null.
 * All mutating/reading methods synchronize on the internal registry map.
 */
public class InterpreterHookRegistry {
  public static final String GLOBAL_KEY = "_GLOBAL_";
  private String interpreterId;
  // noteId -> interpreter class name -> event name -> hook code
  private Map<String, Map<String, Map<String, String>>> registry = new HashMap<>();

  /**
   * hookRegistry constructor.
   *
   * @param interpreterId The Id of the InterpreterGroup instance to bind to
   */
  public InterpreterHookRegistry(final String interpreterId) {
    this.interpreterId = interpreterId;
  }

  /**
   * Get the interpreterGroup id this instance is bound to
   */
  public String getInterpreterId() {
    return interpreterId;
  }

  /**
   * Adds a note to the registry
   *
   * @param noteId The Id of the Note instance to add
   */
  public void addNote(String noteId) {
    synchronized (registry) {
      if (registry.get(noteId) == null) {
        registry.put(noteId, new HashMap<String, Map<String, String>>());
      }
    }
  }

  /**
   * Adds a className to the registry
   *
   * @param noteId The note id
   * @param className The name of the interpreter repl to map the hooks to
   */
  public void addRepl(String noteId, String className) {
    synchronized (registry) {
      addNote(noteId);
      if (registry.get(noteId).get(className) == null) {
        registry.get(noteId).put(className, new HashMap<String, String>());
      }
    }
  }

  /**
   * Register a hook for a specific event.
   *
   * @param noteId Denotes the note this instance belongs to; null means global scope
   * @param className The name of the interpreter repl to map the hooks to
   * @param event hook event (see constants defined in {@link HookType})
   * @param cmd Code to be executed by the interpreter
   * @throws IllegalArgumentException if the event is not a supported hook type
   */
  public void register(String noteId, String className,
                       String event, String cmd) throws IllegalArgumentException {
    // Validate BEFORE mutating the registry, so an invalid registration does
    // not leave behind empty note/class entries (previously addRepl ran first).
    // Using constant.equals(event) is also null-safe for the event argument.
    if (!isValidEvent(event)) {
      throw new IllegalArgumentException("Must be " + HookType.POST_EXEC + ", " +
                                        HookType.POST_EXEC_DEV + ", " +
                                        HookType.PRE_EXEC + " or " +
                                        HookType.PRE_EXEC_DEV);
    }
    synchronized (registry) {
      if (noteId == null) {
        noteId = GLOBAL_KEY;
      }
      addRepl(noteId, className);
      registry.get(noteId).get(className).put(event, cmd);
    }
  }

  // True when the event name is one of the supported hook types.
  private static boolean isValidEvent(String event) {
    return HookType.PRE_EXEC.equals(event) || HookType.POST_EXEC.equals(event)
        || HookType.PRE_EXEC_DEV.equals(event) || HookType.POST_EXEC_DEV.equals(event);
  }

  /**
   * Unregister a hook for a specific event.
   *
   * @param noteId Denotes the note this instance belongs to; null means global scope
   * @param className The name of the interpreter repl to map the hooks to
   * @param event hook event (see constants defined in {@link HookType})
   */
  public void unregister(String noteId, String className, String event) {
    synchronized (registry) {
      if (noteId == null) {
        noteId = GLOBAL_KEY;
      }
      addRepl(noteId, className);
      registry.get(noteId).get(className).remove(event);
    }
  }

  /**
   * Get a hook for a specific event.
   *
   * @param noteId Denotes the note this instance belongs to; null means global scope
   * @param className The name of the interpreter repl to map the hooks to
   * @param event hook event (see constants defined in {@link HookType})
   * @return the registered hook code, or null if none registered
   */
  public String get(String noteId, String className, String event) {
    synchronized (registry) {
      if (noteId == null) {
        noteId = GLOBAL_KEY;
      }
      addRepl(noteId, className);
      return registry.get(noteId).get(className).get(event);
    }
  }

  /**
   * Container for hook event type constants
   */
  public static final class HookType {
    // Execute the hook code PRIOR to main paragraph code execution
    public static final String PRE_EXEC = "pre_exec";
    // Execute the hook code AFTER main paragraph code execution
    public static final String POST_EXEC = "post_exec";
    // Same as above but reserved for interpreter developers, in order to allow
    // notebook users to use the above without overwriting registry settings
    // that are initialized directly in subclasses of Interpreter.
    public static final String PRE_EXEC_DEV = "pre_exec_dev";
    public static final String POST_EXEC_DEV = "post_exec_dev";
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterException.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
/**
* Runtime Exception for interpreters.
*
*/
/**
 * Runtime exception thrown by interpreters.
 */
public class InterpreterException extends RuntimeException {

  /** Wrap an underlying cause. */
  public InterpreterException(Throwable e) {
    super(e);
  }

  /** Create with a descriptive message only. */
  public InterpreterException(String m) {
    super(m);
  }

  /**
   * Create with both a message and a cause, so callers wrapping a caught
   * exception can add context without losing the original stack trace.
   * (Backward-compatible addition; previously only single-argument
   * constructors existed, forcing a choice between message and cause.)
   */
  public InterpreterException(String m, Throwable e) {
    super(m, e);
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterRunner.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterRunner.java | package org.apache.zeppelin.interpreter;
import com.google.gson.annotations.SerializedName;
/**
* Interpreter runner path
*/
/**
 * Holds the per-OS paths of an interpreter runner script, as deserialized
 * from interpreter-setting.json.
 */
public class InterpreterRunner {
  @SerializedName("linux")
  private String linuxPath;

  @SerializedName("win")
  private String winPath;

  /**
   * Resolve the runner script path for the operating system this JVM runs on.
   *
   * @return the Windows path when os.name starts with "Windows", otherwise the linux path
   */
  public String getPath() {
    boolean isWindows = System.getProperty("os.name").startsWith("Windows");
    return isWindows ? winPath : linuxPath;
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/RemoteWorksController.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/RemoteWorksController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
import java.util.List;
/**
* zeppelin job for Remote works controller by interpreter
*
*/
public interface RemoteWorksController {
  /**
   * Look up the context runners for every paragraph of the given note.
   *
   * @param noteId id of the note to query
   */
  List<InterpreterContextRunner> getRemoteContextRunner(String noteId);

  /**
   * Look up the context runner(s) for one specific paragraph of a note.
   *
   * @param noteId id of the note to query
   * @param paragraphId id of the paragraph within that note
   */
  List<InterpreterContextRunner> getRemoteContextRunner(String noteId, String paragraphId);
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterResult.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterResult.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
import java.io.IOException;
import java.io.Serializable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
* Interpreter result template.
*/
/**
 * Result of a single interpreter execution: a status {@link Code} plus an
 * ordered list of typed result messages.
 */
public class InterpreterResult implements Serializable {
  transient Logger logger = LoggerFactory.getLogger(InterpreterResult.class);

  /**
   * Type of result after code execution.
   */
  public static enum Code {
    SUCCESS,
    INCOMPLETE,
    ERROR,
    KEEP_PREVIOUS_RESULT
  }

  /**
   * Type of Data.
   */
  public static enum Type {
    TEXT,
    HTML,
    ANGULAR,
    TABLE,
    IMG,
    SVG,
    NULL
  }

  Code code;
  List<InterpreterResultMessage> msg = new LinkedList<>();

  /** Result with a status only, no messages. */
  public InterpreterResult(Code code) {
    this.code = code;
  }

  /** Result with a status and a pre-built list of messages (copied in). */
  public InterpreterResult(Code code, List<InterpreterResultMessage> msgs) {
    this.code = code;
    this.msg.addAll(msgs);
  }

  /** Result whose message text is parsed for %[display_system] directives. */
  public InterpreterResult(Code code, String msg) {
    this.code = code;
    add(msg);
  }

  /** Result with a single message of an explicit display type. */
  public InterpreterResult(Code code, Type type, String msg) {
    this.code = code;
    add(type, msg);
  }

  /**
   * Automatically detect %[display_system] directives
   * @param msg raw message text to parse and append
   */
  public void add(String msg) {
    InterpreterOutput detector = new InterpreterOutput(null);
    try {
      detector.write(msg);
      detector.flush();
      this.msg.addAll(detector.toInterpreterResultMessage());
      detector.close();
    } catch (IOException e) {
      // Best effort: a parse/IO failure drops the message but keeps the result usable.
      logger.error(e.getMessage(), e);
    }
  }

  /** Append a message of the given display type verbatim. */
  public void add(Type type, String data) {
    msg.add(new InterpreterResultMessage(type, data));
  }

  public Code code() {
    return code;
  }

  public List<InterpreterResultMessage> message() {
    return msg;
  }

  public String toString() {
    StringBuilder buffer = new StringBuilder();
    Type previousType = null;
    boolean first = true;
    for (InterpreterResultMessage message : msg) {
      if (!first) {
        // Messages are newline-separated; a TABLE gets an extra blank line after it.
        buffer.append("\n");
        if (previousType == Type.TABLE) {
          buffer.append("\n");
        }
      }
      buffer.append(message.toString());
      previousType = message.getType();
      first = false;
    }
    return buffer.toString();
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/WrappedInterpreter.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/WrappedInterpreter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
/**
* WrappedInterpreter
*/
public interface WrappedInterpreter {
  /** @return the interpreter instance this wrapper decorates. */
  public Interpreter getInnerInterpreter();
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOutputChangeListener.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOutputChangeListener.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
import java.io.File;
/**
* InterpreterOutputChangeListener
*/
public interface InterpreterOutputChangeListener {
  /**
   * Callback invoked when a watched output file changes on disk.
   *
   * @param file the file that changed
   */
  public void fileChanged(File file);
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOption.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOption.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
import java.util.ArrayList;
import java.util.List;
/**
*
*/
/**
 * Lifecycle/connection options for an interpreter setting: whether it runs
 * remotely, its per-user / per-note instantiation mode ("shared", "scoped"
 * or "isolated"), permission settings, and user impersonation.
 */
public class InterpreterOption {
  public static final transient String SHARED = "shared";
  public static final transient String SCOPED = "scoped";
  public static final transient String ISOLATED = "isolated";

  boolean remote;
  // Host/port of an already-running interpreter process, when isExistingProcess is set.
  String host = null;
  int port = -1;

  String perNote;
  String perUser;

  boolean isExistingProcess;
  boolean setPermission;
  List<String> users;
  boolean isUserImpersonate;

  public boolean isExistingProcess() {
    return isExistingProcess;
  }

  public void setExistingProcess(boolean isExistingProcess) {
    this.isExistingProcess = isExistingProcess;
  }

  public void setPort(int port) {
    this.port = port;
  }

  public void setHost(String host) {
    this.host = host;
  }

  public boolean permissionIsSet() {
    return setPermission;
  }

  public void setUserPermission(boolean setPermission) {
    this.setPermission = setPermission;
  }

  public List<String> getUsers() {
    return users;
  }

  public boolean isUserImpersonate() {
    return isUserImpersonate;
  }

  public void setUserImpersonate(boolean userImpersonate) {
    isUserImpersonate = userImpersonate;
  }

  /** Defaults to a local (non-remote) interpreter, shared per user and per note. */
  public InterpreterOption() {
    this(false);
  }

  public InterpreterOption(boolean remote) {
    this(remote, SHARED, SHARED);
  }

  /**
   * @param remote whether the interpreter runs in a separate (remote) process
   * @param perUser one of SHARED / SCOPED / ISOLATED; must not be null
   * @param perNote one of SHARED / SCOPED / ISOLATED; must not be null
   */
  public InterpreterOption(boolean remote, String perUser, String perNote) {
    if (perUser == null) {
      throw new NullPointerException("perUser can not be null.");
    }
    if (perNote == null) {
      throw new NullPointerException("perNote can not be null.");
    }

    this.remote = remote;
    this.perUser = perUser;
    this.perNote = perNote;
  }

  /**
   * Copy factory.
   *
   * @param other option to duplicate
   * @return a new instance with every field copied (users list deep-copied).
   *         Fix: previously isUserImpersonate was silently dropped from the copy.
   */
  public static InterpreterOption fromInterpreterOption(InterpreterOption other) {
    InterpreterOption option = new InterpreterOption();
    option.remote = other.remote;
    option.host = other.host;
    option.port = other.port;
    option.perNote = other.perNote;
    option.perUser = other.perUser;
    option.isExistingProcess = other.isExistingProcess;
    option.setPermission = other.setPermission;
    option.isUserImpersonate = other.isUserImpersonate;
    option.users = (null == other.users) ?
        new ArrayList<String>() : new ArrayList<>(other.users);

    return option;
  }

  public boolean isRemote() {
    return remote;
  }

  public void setRemote(boolean remote) {
    this.remote = remote;
  }

  public String getHost() {
    return host;
  }

  public int getPort() {
    return port;
  }

  public boolean perUserShared() {
    return SHARED.equals(perUser);
  }

  public boolean perUserScoped() {
    return SCOPED.equals(perUser);
  }

  public boolean perUserIsolated() {
    return ISOLATED.equals(perUser);
  }

  public boolean perNoteShared() {
    return SHARED.equals(perNote);
  }

  public boolean perNoteScoped() {
    return SCOPED.equals(perNote);
  }

  public boolean perNoteIsolated() {
    return ISOLATED.equals(perNote);
  }

  /** @return true when any mode requires a dedicated interpreter process. */
  public boolean isProcess() {
    return perUserIsolated() || perNoteIsolated();
  }

  /** @return true when any mode requires a dedicated session within a shared process. */
  public boolean isSession() {
    return perUserScoped() || perNoteScoped();
  }

  public void setPerNote(String perNote) {
    this.perNote = perNote;
  }

  public void setPerUser(String perUser) {
    this.perUser = perUser;
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/Interpreter.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/Interpreter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
import java.net.URL;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.zeppelin.annotation.ZeppelinApi;
import org.apache.zeppelin.annotation.Experimental;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
import org.apache.zeppelin.scheduler.Scheduler;
import org.apache.zeppelin.scheduler.SchedulerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Interface for interpreters.
* If you want to implement new Zeppelin interpreter, extend this class
*
* Please see,
* https://zeppelin.apache.org/docs/latest/development/writingzeppelininterpreter.html
*
* open(), close(), interpret() is three the most important method you need to implement.
* cancel(), getProgress(), completion() is good to have
* getFormType(), getScheduler() determine Zeppelin's behavior
*/
public abstract class Interpreter {
/**
* Opens interpreter. You may want to place your initialize routine here.
* open() is called only once
*/
@ZeppelinApi
public abstract void open();
/**
* Closes interpreter. You may want to free your resources up here.
* close() is called only once
*/
@ZeppelinApi
public abstract void close();
/**
* Run code and return result, in synchronous way.
*
* @param st statements to run
*/
@ZeppelinApi
public abstract InterpreterResult interpret(String st, InterpreterContext context);
/**
* Optionally implement the canceling routine to abort interpret() method
*/
@ZeppelinApi
public abstract void cancel(InterpreterContext context);
/**
* Dynamic form handling
* see http://zeppelin.apache.org/docs/dynamicform.html
*
* @return FormType.SIMPLE enables simple pattern replacement (eg. Hello ${name=world}),
* FormType.NATIVE handles form in API
*/
@ZeppelinApi
public abstract FormType getFormType();
/**
* get interpret() method running process in percentage.
*
* @return number between 0-100
*/
@ZeppelinApi
public abstract int getProgress(InterpreterContext context);
/**
* Get completion list based on cursor position.
* By implementing this method, it enables auto-completion.
*
* @param buf statements
* @param cursor cursor position in statements
* @return list of possible completion. Return empty list if there're nothing to return.
*/
@ZeppelinApi
public List<InterpreterCompletion> completion(String buf, int cursor) {
return null;
}
/**
* Interpreter can implements it's own scheduler by overriding this method.
* There're two default scheduler provided, FIFO, Parallel.
* If your interpret() can handle concurrent request, use Parallel or use FIFO.
*
* You can get default scheduler by using
* SchedulerFactory.singleton().createOrGetFIFOScheduler()
* SchedulerFactory.singleton().createOrGetParallelScheduler()
*
* @return return scheduler instance. This method can be called multiple times and have to return
* the same instance. Can not return null.
*/
@ZeppelinApi
public Scheduler getScheduler() {
return SchedulerFactory.singleton().createOrGetFIFOScheduler("interpreter_" + this.hashCode());
}
public static Logger logger = LoggerFactory.getLogger(Interpreter.class);
private InterpreterGroup interpreterGroup;
private URL[] classloaderUrls;
protected Properties property;
private String userName;
@ZeppelinApi
public Interpreter(Properties property) {
logger.debug("Properties: {}", property);
this.property = property;
}
public void setProperty(Properties property) {
this.property = property;
}
@ZeppelinApi
public Properties getProperty() {
Properties p = new Properties();
p.putAll(property);
RegisteredInterpreter registeredInterpreter = Interpreter.findRegisteredInterpreterByClassName(
getClassName());
if (null != registeredInterpreter) {
Map<String, InterpreterProperty> defaultProperties = registeredInterpreter.getProperties();
for (String k : defaultProperties.keySet()) {
if (!p.containsKey(k)) {
String value = defaultProperties.get(k).getValue();
if (value != null) {
p.put(k, defaultProperties.get(k).getValue());
}
}
}
}
return p;
}
@ZeppelinApi
public String getProperty(String key) {
logger.debug("key: {}, value: {}", key, getProperty().getProperty(key));
return getProperty().getProperty(key);
}
public String getClassName() {
return this.getClass().getName();
}
public void setUserName(String userName) {
this.userName = userName;
}
public String getUserName() {
return this.userName;
}
public void setInterpreterGroup(InterpreterGroup interpreterGroup) {
this.interpreterGroup = interpreterGroup;
}
@ZeppelinApi
public InterpreterGroup getInterpreterGroup() {
return this.interpreterGroup;
}
public URL[] getClassloaderUrls() {
return classloaderUrls;
}
public void setClassloaderUrls(URL[] classloaderUrls) {
this.classloaderUrls = classloaderUrls;
}
/**
* General function to register hook event
*
* @param noteId - Note to bind hook to
* @param event The type of event to hook to (pre_exec, post_exec)
* @param cmd The code to be executed by the interpreter on given event
*/
@Experimental
public void registerHook(String noteId, String event, String cmd) {
InterpreterHookRegistry hooks = interpreterGroup.getInterpreterHookRegistry();
String className = getClassName();
hooks.register(noteId, className, event, cmd);
}
/**
* registerHook() wrapper for global scope
*
* @param event The type of event to hook to (pre_exec, post_exec)
* @param cmd The code to be executed by the interpreter on given event
*/
@Experimental
public void registerHook(String event, String cmd) {
registerHook(null, event, cmd);
}
/**
* Get the hook code
*
* @param noteId - Note to bind hook to
* @param event The type of event to hook to (pre_exec, post_exec)
*/
@Experimental
public String getHook(String noteId, String event) {
InterpreterHookRegistry hooks = interpreterGroup.getInterpreterHookRegistry();
String className = getClassName();
return hooks.get(noteId, className, event);
}
/**
* getHook() wrapper for global scope
*
* @param event The type of event to hook to (pre_exec, post_exec)
*/
@Experimental
public String getHook(String event) {
return getHook(null, event);
}
/**
* Unbind code from given hook event
*
* @param noteId - Note to bind hook to
* @param event The type of event to hook to (pre_exec, post_exec)
*/
@Experimental
public void unregisterHook(String noteId, String event) {
InterpreterHookRegistry hooks = interpreterGroup.getInterpreterHookRegistry();
String className = getClassName();
hooks.unregister(noteId, className, event);
}
/**
* unregisterHook() wrapper for global scope
*
* @param event The type of event to hook to (pre_exec, post_exec)
*/
@Experimental
public void unregisterHook(String event) {
unregisterHook(null, event);
}
@ZeppelinApi
public Interpreter getInterpreterInTheSameSessionByClassName(String className) {
synchronized (interpreterGroup) {
for (List<Interpreter> interpreters : interpreterGroup.values()) {
boolean belongsToSameNoteGroup = false;
Interpreter interpreterFound = null;
for (Interpreter intp : interpreters) {
if (intp.getClassName().equals(className)) {
interpreterFound = intp;
}
Interpreter p = intp;
while (p instanceof WrappedInterpreter) {
p = ((WrappedInterpreter) p).getInnerInterpreter();
}
if (this == p) {
belongsToSameNoteGroup = true;
}
}
if (belongsToSameNoteGroup) {
return interpreterFound;
}
}
}
return null;
}
/**
* Type of interpreter.
*/
public static enum FormType {
NATIVE, SIMPLE, NONE
}
  /**
   * Represent registered interpreter class: the static metadata (name, group,
   * implementation class, properties, editor config) describing one
   * interpreter implementation.
   */
  public static class RegisteredInterpreter {
    private String group;
    private String name;
    private String className;
    private boolean defaultInterpreter;
    private Map<String, InterpreterProperty> properties;
    private Map<String, Object> editor;
    // Filesystem location of the interpreter; set externally via setPath().
    private String path;
    // NOTE(review): option and runner are never assigned anywhere in this
    // class — presumably populated by JSON deserialization of
    // interpreter-setting.json. Confirm before relying on their getters
    // returning non-null.
    private InterpreterOption option;
    private InterpreterRunner runner;

    public RegisteredInterpreter(String name, String group, String className,
        Map<String, InterpreterProperty> properties) {
      this(name, group, className, false, properties);
    }

    public RegisteredInterpreter(String name, String group, String className,
        boolean defaultInterpreter, Map<String, InterpreterProperty> properties) {
      super();
      this.name = name;
      this.group = group;
      this.className = className;
      this.defaultInterpreter = defaultInterpreter;
      this.properties = properties;
      this.editor = new HashMap<>();
    }

    public String getName() {
      return name;
    }

    public String getGroup() {
      return group;
    }

    public String getClassName() {
      return className;
    }

    public boolean isDefaultInterpreter() {
      return defaultInterpreter;
    }

    public void setDefaultInterpreter(boolean defaultInterpreter) {
      this.defaultInterpreter = defaultInterpreter;
    }

    public Map<String, InterpreterProperty> getProperties() {
      return properties;
    }

    public Map<String, Object> getEditor() {
      return editor;
    }

    public void setPath(String path) {
      this.path = path;
    }

    public String getPath() {
      return path;
    }

    /** Registry key for this interpreter, formatted as {@code "group.name"}. */
    public String getInterpreterKey() {
      return getGroup() + "." + getName();
    }

    public InterpreterOption getOption() {
      return option;
    }

    public InterpreterRunner getRunner() {
      return runner;
    }
  }
  /**
   * Type of Scheduling. The names suggest FIFO runs jobs one at a time and
   * PARALLEL allows concurrent jobs; the actual semantics are implemented by
   * the Scheduler classes — confirm there.
   */
  public static enum SchedulingMode {
    FIFO, PARALLEL
  }
  // Global registry of statically-registered interpreters, keyed by
  // "group.name" (see RegisteredInterpreter.getInterpreterKey()).
  // NOTE(review): this is a Collections.synchronizedMap — iterating its
  // values() view must be manually synchronized on the map itself.
  public static Map<String, RegisteredInterpreter> registeredInterpreters = Collections
      .synchronizedMap(new HashMap<String, RegisteredInterpreter>());
  /**
   * Statically register an interpreter (as a non-default one) with the given
   * properties. Delegates to the five-argument overload.
   *
   * @deprecated static registration is deprecated; use
   *     interpreter-setting.json instead (see the overload's warning).
   */
  @Deprecated
  public static void register(String name, String group, String className,
      Map<String, InterpreterProperty> properties) {
    register(name, group, className, false, properties);
  }
  /**
   * Statically register an interpreter, logging a deprecation warning and
   * forwarding to {@code register(RegisteredInterpreter)}.
   *
   * @deprecated use interpreter-setting.json shipped in the interpreter jar
   *     (or under interpreter/{interpreter}/) instead.
   */
  @Deprecated
  public static void register(String name, String group, String className,
      boolean defaultInterpreter, Map<String, InterpreterProperty> properties) {
    logger.warn("Static initialization is deprecated for interpreter {}, You should change it " +
        "to use interpreter-setting.json in your jar or " +
        "interpreter/{interpreter}/interpreter-setting.json", name);
    register(new RegisteredInterpreter(name, group, className, defaultInterpreter, properties));
  }
@Deprecated
public static void register(RegisteredInterpreter registeredInterpreter) {
String interpreterKey = registeredInterpreter.getInterpreterKey();
if (!registeredInterpreters.containsKey(interpreterKey)) {
registeredInterpreters.put(interpreterKey, registeredInterpreter);
} else {
RegisteredInterpreter existInterpreter = registeredInterpreters.get(interpreterKey);
if (!existInterpreter.getProperties().equals(registeredInterpreter.getProperties())) {
logger.error("exist registeredInterpreter with the same key but has different settings.");
}
}
}
public static RegisteredInterpreter findRegisteredInterpreterByClassName(String className) {
for (RegisteredInterpreter ri : registeredInterpreters.values()) {
if (ri.getClassName().equals(className)) {
return ri;
}
}
return null;
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterContextRunner.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterContextRunner.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
/**
*/
public abstract class InterpreterContextRunner implements Runnable {
String noteId;
private String paragraphId;
public InterpreterContextRunner(String noteId, String paragraphId) {
this.noteId = noteId;
this.paragraphId = paragraphId;
}
@Override
public boolean equals(Object o) {
if (o instanceof InterpreterContextRunner) {
InterpreterContextRunner io = ((InterpreterContextRunner) o);
if (io.getParagraphId().equals(paragraphId) &&
io.getNoteId().equals(noteId)) {
return true;
} else {
return false;
}
} else {
return false;
}
}
@Override
public abstract void run();
public String getNoteId() {
return noteId;
}
public String getParagraphId() {
return paragraphId;
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterResultMessageOutputListener.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterResultMessageOutputListener.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
/**
* InterpreterResultMessage update events
*/
/**
 * Callback contract for observing updates to a single
 * InterpreterResultMessageOutput buffer.
 */
public interface InterpreterResultMessageOutputListener {

  /**
   * Invoked whenever a newline is detected in the output.
   *
   * @param out the output buffer that grew
   * @param line bytes of the appended line
   */
  void onAppend(InterpreterResultMessageOutput out, byte[] line);

  /**
   * Invoked when the entire output is replaced wholesale, e.g. after a new
   * display system is detected.
   *
   * @param out the output buffer that changed
   */
  void onUpdate(InterpreterResultMessageOutput out);
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterGroup.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterGroup.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.zeppelin.display.AngularObjectRegistry;
import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcess;
import org.apache.zeppelin.resource.ResourcePool;
import org.apache.zeppelin.scheduler.Scheduler;
import org.apache.zeppelin.scheduler.SchedulerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* InterpreterGroup is list of interpreters in the same interpreter group.
* For example spark, pyspark, sql interpreters are in the same 'spark' group
* and InterpreterGroup will have reference to these all interpreters.
*
* Remember, list of interpreters are dedicated to a session. Session could be shared across user
* or notes, so the sessionId could be user or noteId or their combination.
* So InterpreterGroup internally manages map of [interpreterSessionKey(noteId, user, or
* their combination), list of interpreters]
*
* A InterpreterGroup runs on interpreter process.
* And unit of interpreter instantiate, restart, bind, unbind.
*/
public class InterpreterGroup extends ConcurrentHashMap<String, List<Interpreter>> {
  // Group id; null until supplied via the constructor or lazily generated by getId().
  String id;

  private static final Logger LOGGER = LoggerFactory.getLogger(InterpreterGroup.class);

  // Registry of angular objects shared with the frontend for this group.
  AngularObjectRegistry angularObjectRegistry;
  // Registry of pre/post-execute hooks for interpreters in this group.
  InterpreterHookRegistry hookRegistry;
  RemoteInterpreterProcess remoteInterpreterProcess; // attached remote interpreter process
  ResourcePool resourcePool;
  // Set once the angular registry has been pushed (presumably to the remote
  // process — confirm against callers of setAngularRegistryPushed).
  boolean angularRegistryPushed = false;

  // map [notebook session, Interpreters in the group], to support per note session interpreters
  //Map<String, List<Interpreter>> interpreters = new ConcurrentHashMap<String,
  // List<Interpreter>>();

  // Process-wide registry of every live group, keyed by id; entries are
  // removed again in close()/shutdown().
  private static final Map<String, InterpreterGroup> allInterpreterGroups =
      new ConcurrentHashMap<>();

  public static InterpreterGroup getByInterpreterGroupId(String id) {
    return allInterpreterGroups.get(id);
  }

  // Returns a snapshot of all live groups. NOTE(review): constructs a raw
  // LinkedList (unchecked warning).
  public static Collection<InterpreterGroup> getAll() {
    return new LinkedList(allInterpreterGroups.values());
  }

  /**
   * Create InterpreterGroup with given id
   * @param id
   */
  public InterpreterGroup(String id) {
    this.id = id;
    allInterpreterGroups.put(id, this);
  }

  /**
   * Create InterpreterGroup with autogenerated id
   */
  public InterpreterGroup() {
    getId(); // side effect: generates and assigns this.id
    allInterpreterGroups.put(id, this);
  }

  private static String generateId() {
    return "InterpreterGroup_" + System.currentTimeMillis() + "_"
        + new Random().nextInt();
  }

  /** Returns this group's id, generating one on first call if none was given. */
  public String getId() {
    synchronized (this) {
      if (id == null) {
        id = generateId();
      }
      return id;
    }
  }

  /**
   * Get combined property of all interpreters in this group
   * @return merged Properties of the first session's interpreters
   */
  public Properties getProperty() {
    Properties p = new Properties();
    for (List<Interpreter> intpGroupForASession : this.values()) {
      for (Interpreter intp : intpGroupForASession) {
        p.putAll(intp.getProperty());
      }
      // it's okay to break here while every List<Interpreters> will have the same property set
      break;
    }
    return p;
  }

  public AngularObjectRegistry getAngularObjectRegistry() {
    return angularObjectRegistry;
  }

  public void setAngularObjectRegistry(AngularObjectRegistry angularObjectRegistry) {
    this.angularObjectRegistry = angularObjectRegistry;
  }

  public InterpreterHookRegistry getInterpreterHookRegistry() {
    return hookRegistry;
  }

  public void setInterpreterHookRegistry(InterpreterHookRegistry hookRegistry) {
    this.hookRegistry = hookRegistry;
  }

  public RemoteInterpreterProcess getRemoteInterpreterProcess() {
    return remoteInterpreterProcess;
  }

  public void setRemoteInterpreterProcess(RemoteInterpreterProcess remoteInterpreterProcess) {
    this.remoteInterpreterProcess = remoteInterpreterProcess;
  }

  /**
   * Close all interpreter instances in this group, fully dereference the
   * attached remote process, and drop this group from the global registry.
   */
  public void close() {
    LOGGER.info("Close interpreter group " + getId());
    List<Interpreter> intpToClose = new LinkedList<>();
    for (List<Interpreter> intpGroupForSession : this.values()) {
      intpToClose.addAll(intpGroupForSession);
    }
    close(intpToClose);

    // make sure remote interpreter process terminates
    if (remoteInterpreterProcess != null) {
      while (remoteInterpreterProcess.referenceCount() > 0) {
        remoteInterpreterProcess.dereference();
      }
      remoteInterpreterProcess = null;
    }
    allInterpreterGroups.remove(id);
  }

  /**
   * Close all interpreter instances in this group for the session
   * @param sessionId
   */
  public void close(String sessionId) {
    LOGGER.info("Close interpreter group " + getId() + " for session: " + sessionId);
    final List<Interpreter> intpForSession = this.get(sessionId);
    close(intpForSession);
  }

  private void close(final Collection<Interpreter> intpToClose) {
    close(null, null, null, intpToClose);
  }

  public void close(final Map<String, InterpreterGroup> interpreterGroupRef,
      final String processKey, final String sessionKey) {
    LOGGER.info("Close interpreter group " + getId() + " for session: " + sessionKey);
    close(interpreterGroupRef, processKey, sessionKey, this.get(sessionKey));
  }

  // Closes the given interpreters on a worker thread (which is joined before
  // returning, so the call still blocks) and prunes the bookkeeping maps
  // afterwards.
  private void close(final Map<String, InterpreterGroup> interpreterGroupRef,
      final String processKey, final String sessionKey, final Collection<Interpreter> intpToClose) {
    if (intpToClose == null) {
      return;
    }
    Thread t = new Thread() {
      public void run() {
        for (Interpreter interpreter : intpToClose) {
          Scheduler scheduler = interpreter.getScheduler();
          interpreter.close();

          // Release the interpreter's scheduler so its job queue is torn down.
          if (null != scheduler) {
            SchedulerFactory.singleton().removeScheduler(scheduler.getName());
          }
        }

        if (remoteInterpreterProcess != null) {
          //TODO(jl): Because interpreter.close() runs as a separate thread, we cannot guarantee
          // referenceCount is a proper value. And as the same reason, we must not call
          // remoteInterpreterProcess.dereference twice - this method also be called by
          // interpreter.close().

          // remoteInterpreterProcess.dereference();
          if (remoteInterpreterProcess.referenceCount() <= 0) {
            remoteInterpreterProcess = null;
            allInterpreterGroups.remove(id);
          }
        }

        // TODO(jl): While closing interpreters in a same session, we should remove after all
        // interpreters are removed. OMG. It's too dirty!!
        if (null != interpreterGroupRef && null != processKey && null != sessionKey) {
          InterpreterGroup interpreterGroup = interpreterGroupRef.get(processKey);
          if (1 == interpreterGroup.size() && interpreterGroup.containsKey(sessionKey)) {
            interpreterGroupRef.remove(processKey);
          } else {
            interpreterGroup.remove(sessionKey);
          }
        }
      }
    };

    t.start();
    try {
      t.join();
    } catch (InterruptedException e) {
      // NOTE(review): interrupt status is not re-asserted here
      // (Thread.currentThread().interrupt()); callers lose the interrupt.
      LOGGER.error("Can't close interpreter: {}", getId(), e);
    }
  }

  /**
   * Close all interpreter instances in this group. Unlike close(), this
   * dereferences the remote process and removes the group from the registry
   * BEFORE closing the interpreters.
   */
  public void shutdown() {
    LOGGER.info("Close interpreter group " + getId());

    // make sure remote interpreter process terminates
    if (remoteInterpreterProcess != null) {
      while (remoteInterpreterProcess.referenceCount() > 0) {
        remoteInterpreterProcess.dereference();
      }
      remoteInterpreterProcess = null;
    }
    allInterpreterGroups.remove(id);

    List<Interpreter> intpToClose = new LinkedList<>();
    for (List<Interpreter> intpGroupForSession : this.values()) {
      intpToClose.addAll(intpGroupForSession);
    }
    close(intpToClose);
  }

  public void setResourcePool(ResourcePool resourcePool) {
    this.resourcePool = resourcePool;
  }

  public ResourcePool getResourcePool() {
    return resourcePool;
  }

  public boolean isAngularRegistryPushed() {
    return angularRegistryPushed;
  }

  public void setAngularRegistryPushed(boolean angularRegistryPushed) {
    this.angularRegistryPushed = angularRegistryPushed;
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterHookListener.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterHookListener.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
/**
* An interface for processing custom callback code into the interpreter.
*/
/**
 * Receives hook code around interpretation so that custom callback code can be
 * spliced into the script that will be interpreted.
 */
public interface InterpreterHookListener {

  /** Prepends pre-execute hook code to the script that will be interpreted. */
  void onPreExecute(String script);

  /** Appends post-execute hook code to the script that will be interpreted. */
  void onPostExecute(String script);
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/LazyOpenInterpreter.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/LazyOpenInterpreter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
import java.net.URL;
import java.util.List;
import java.util.Properties;
import org.apache.zeppelin.interpreter.remote.RemoteInterpreter;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
import org.apache.zeppelin.scheduler.Scheduler;
/**
* Interpreter wrapper for lazy initialization
*/
public class LazyOpenInterpreter
    extends Interpreter
    implements WrappedInterpreter {
  // The wrapped interpreter whose open() is deferred until first use.
  private Interpreter intp;
  // volatile for cross-thread visibility; writes are additionally guarded by
  // the synchronization in open()/close().
  volatile boolean opened = false;

  public LazyOpenInterpreter(Interpreter intp) {
    super(new Properties());
    this.intp = intp;
  }

  @Override
  public Interpreter getInnerInterpreter() {
    return intp;
  }

  @Override
  public void setProperty(Properties property) {
    intp.setProperty(property);
  }

  @Override
  public Properties getProperty() {
    return intp.getProperty();
  }

  @Override
  public String getProperty(String key) {
    return intp.getProperty(key);
  }

  /**
   * Opens the inner interpreter at most once. Fast path returns immediately
   * when already open; otherwise re-checks under the lock on {@code intp}.
   */
  @Override
  public synchronized void open() {
    if (opened == true) {
      return;
    }

    synchronized (intp) {
      if (opened == false) {
        intp.open();
        opened = true;
      }
    }
  }

  @Override
  public void close() {
    synchronized (intp) {
      if (opened == true) {
        intp.close();
        opened = false;
      }
    }
  }

  public boolean isOpen() {
    synchronized (intp) {
      return opened;
    }
  }

  @Override
  public InterpreterResult interpret(String st, InterpreterContext context) {
    open();
    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
    try {
      return intp.interpret(st, context);
    } finally {
      // Restore the caller's context classloader in case the inner
      // interpreter replaced it during interpretation.
      Thread.currentThread().setContextClassLoader(classLoader);
    }
  }

  @Override
  public void cancel(InterpreterContext context) {
    open();
    intp.cancel(context);
  }

  @Override
  public FormType getFormType() {
    return intp.getFormType();
  }

  @Override
  public int getProgress(InterpreterContext context) {
    // Report zero progress rather than forcing an open of the interpreter.
    if (opened) {
      return intp.getProgress(context);
    } else {
      return 0;
    }
  }

  @Override
  public Scheduler getScheduler() {
    return intp.getScheduler();
  }

  @Override
  public List<InterpreterCompletion> completion(String buf, int cursor) {
    open();
    List completion = intp.completion(buf, cursor);
    return completion;
  }

  @Override
  public String getClassName() {
    return intp.getClassName();
  }

  @Override
  public InterpreterGroup getInterpreterGroup() {
    return intp.getInterpreterGroup();
  }

  @Override
  public void setInterpreterGroup(InterpreterGroup interpreterGroup) {
    intp.setInterpreterGroup(interpreterGroup);
  }

  @Override
  public URL [] getClassloaderUrls() {
    return intp.getClassloaderUrls();
  }

  @Override
  public void setClassloaderUrls(URL [] urls) {
    intp.setClassloaderUrls(urls);
  }

  @Override
  public void registerHook(String noteId, String event, String cmd) {
    intp.registerHook(noteId, event, cmd);
  }

  @Override
  public void registerHook(String event, String cmd) {
    intp.registerHook(event, cmd);
  }

  @Override
  public String getHook(String noteId, String event) {
    return intp.getHook(noteId, event);
  }

  @Override
  public String getHook(String event) {
    return intp.getHook(event);
  }

  @Override
  public void unregisterHook(String noteId, String event) {
    intp.unregisterHook(noteId, event);
  }

  @Override
  public void unregisterHook(String event) {
    intp.unregisterHook(event);
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/ClassloaderInterpreter.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/ClassloaderInterpreter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
import java.net.URL;
import java.util.List;
import java.util.Properties;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
import org.apache.zeppelin.scheduler.Scheduler;
/**
* Add to the classpath interpreters.
*
*/
public class ClassloaderInterpreter
    extends Interpreter
    implements WrappedInterpreter {
  // Classloader installed as the thread context classloader around every
  // delegated call. Note: each finally block re-captures the context
  // classloader into this field, so changes made by the inner interpreter
  // persist for subsequent calls.
  private ClassLoader cl;
  // The wrapped interpreter that every method delegates to.
  private Interpreter intp;

  public ClassloaderInterpreter(Interpreter intp, ClassLoader cl) {
    super(new Properties());
    this.cl = cl;
    this.intp = intp;
  }

  @Override
  public Interpreter getInnerInterpreter() {
    return intp;
  }

  public ClassLoader getClassloader() {
    return cl;
  }

  @Override
  public InterpreterResult interpret(String st, InterpreterContext context) {
    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(cl);
    try {
      return intp.interpret(st, context);
    } catch (InterpreterException e) {
      throw e;
    } catch (Exception e) {
      throw new InterpreterException(e);
    } finally {
      // Capture whatever classloader the inner interpreter left installed so
      // later calls keep using it, then restore the caller's original one.
      cl = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(oldcl);
    }
  }

  // All remaining methods follow the same template as interpret(): swap in
  // `cl` as the context classloader, delegate, wrap unexpected exceptions in
  // InterpreterException, and in finally re-capture `cl` and restore the
  // caller's classloader.

  @Override
  public void open() {
    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(cl);
    try {
      intp.open();
    } catch (Exception e) {
      throw new InterpreterException(e);
    } finally {
      cl = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(oldcl);
    }
  }

  @Override
  public void close() {
    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(cl);
    try {
      intp.close();
    } catch (Exception e) {
      throw new InterpreterException(e);
    } finally {
      cl = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(oldcl);
    }
  }

  @Override
  public void cancel(InterpreterContext context) {
    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(cl);
    try {
      intp.cancel(context);
    } catch (Exception e) {
      throw new InterpreterException(e);
    } finally {
      cl = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(oldcl);
    }
  }

  @Override
  public FormType getFormType() {
    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(cl);
    try {
      return intp.getFormType();
    } catch (Exception e) {
      throw new InterpreterException(e);
    } finally {
      cl = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(oldcl);
    }
  }

  @Override
  public int getProgress(InterpreterContext context) {
    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(cl);
    try {
      return intp.getProgress(context);
    } catch (Exception e) {
      throw new InterpreterException(e);
    } finally {
      cl = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(oldcl);
    }
  }

  @Override
  public Scheduler getScheduler() {
    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(cl);
    try {
      return intp.getScheduler();
    } catch (Exception e) {
      throw new InterpreterException(e);
    } finally {
      cl = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(oldcl);
    }
  }

  @Override
  public List<InterpreterCompletion> completion(String buf, int cursor) {
    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(cl);
    try {
      List completion = intp.completion(buf, cursor);
      return completion;
    } catch (Exception e) {
      throw new InterpreterException(e);
    } finally {
      cl = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(oldcl);
    }
  }

  @Override
  public String getClassName() {
    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(cl);
    try {
      return intp.getClassName();
    } catch (Exception e) {
      throw new InterpreterException(e);
    } finally {
      cl = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(oldcl);
    }
  }

  @Override
  public void setInterpreterGroup(InterpreterGroup interpreterGroup) {
    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(cl);
    try {
      intp.setInterpreterGroup(interpreterGroup);
    } catch (Exception e) {
      throw new InterpreterException(e);
    } finally {
      cl = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(oldcl);
    }
  }

  @Override
  public InterpreterGroup getInterpreterGroup() {
    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(cl);
    try {
      return intp.getInterpreterGroup();
    } catch (Exception e) {
      throw new InterpreterException(e);
    } finally {
      cl = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(oldcl);
    }
  }

  @Override
  public void setClassloaderUrls(URL [] urls) {
    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(cl);
    try {
      intp.setClassloaderUrls(urls);
    } catch (Exception e) {
      throw new InterpreterException(e);
    } finally {
      cl = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(oldcl);
    }
  }

  @Override
  public URL [] getClassloaderUrls() {
    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(cl);
    try {
      return intp.getClassloaderUrls();
    } catch (Exception e) {
      throw new InterpreterException(e);
    } finally {
      cl = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(oldcl);
    }
  }

  @Override
  public void setProperty(Properties property) {
    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(cl);
    try {
      intp.setProperty(property);
    } catch (Exception e) {
      throw new InterpreterException(e);
    } finally {
      cl = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(oldcl);
    }
  }

  @Override
  public Properties getProperty() {
    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(cl);
    try {
      return intp.getProperty();
    } catch (Exception e) {
      throw new InterpreterException(e);
    } finally {
      cl = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(oldcl);
    }
  }

  @Override
  public String getProperty(String key) {
    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(cl);
    try {
      return intp.getProperty(key);
    } catch (Exception e) {
      throw new InterpreterException(e);
    } finally {
      cl = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(oldcl);
    }
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOutputListener.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOutputListener.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
/**
* Listen InterpreterOutput buffer flush
*/
/**
 * Listens for buffer flushes of an InterpreterOutput.
 */
public interface InterpreterOutputListener {

  /**
   * Invoked when every message output should be refreshed.
   *
   * @param out the output whose messages changed
   */
  void onUpdateAll(InterpreterOutput out);

  /**
   * Invoked when a newline is detected in one message output.
   *
   * @param index position of the message output within the overall output
   * @param out the message output that grew
   * @param line bytes of the appended line
   */
  void onAppend(int index, InterpreterResultMessageOutput out, byte[] line);

  /**
   * Invoked when one message output is replaced wholesale, e.g. after a new
   * display system is detected.
   *
   * @param index position of the message output within the overall output
   * @param out the message output that changed
   */
  void onUpdate(int index, InterpreterResultMessageOutput out);
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterProperty.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterProperty.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
/**
* Represent property of interpreter
*/
public class InterpreterProperty {
String envName;
String propertyName;
String defaultValue;
String description;
public InterpreterProperty(String envName, String propertyName, String defaultValue,
String description) {
this.envName = envName;
this.propertyName = propertyName;
this.defaultValue = defaultValue;
this.description = description;
}
public InterpreterProperty(String defaultValue, String description) {
this(null, null, defaultValue, description);
}
public String getEnvName() {
return envName;
}
public void setEnvName(String envName) {
this.envName = envName;
}
public String getPropertyName() {
return propertyName;
}
public void setPropertyName(String propertyName) {
this.propertyName = propertyName;
}
public String getDefaultValue() {
return defaultValue;
}
public void setDefaultValue(String defaultValue) {
this.defaultValue = defaultValue;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public int hashCode() {
return this.toString().hashCode();
}
public boolean equals(Object o) {
if (o == null) return false;
return this.toString().equals(o.toString());
}
public String getValue() {
if (envName != null && !envName.isEmpty()) {
String envValue = System.getenv().get(envName);
if (envValue != null) {
return envValue;
}
}
if (propertyName != null && !propertyName.isEmpty()) {
String propValue = System.getProperty(propertyName);
if (propValue != null) {
return propValue;
}
}
return defaultValue;
}
@Override
public String toString() {
return String.format("{envName=%s, propertyName=%s, defaultValue=%s, description=%20s}",
envName, propertyName, defaultValue, description);
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOutputChangeWatcher.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOutputChangeWatcher.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
import static java.nio.file.StandardWatchEventKinds.ENTRY_CREATE;
import static java.nio.file.StandardWatchEventKinds.ENTRY_DELETE;
import static java.nio.file.StandardWatchEventKinds.ENTRY_MODIFY;
import static java.nio.file.StandardWatchEventKinds.OVERFLOW;
import java.io.File;
import java.io.IOException;
import java.nio.file.ClosedWatchServiceException;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Watch the change for the development mode support
*/
public class InterpreterOutputChangeWatcher extends Thread {
  Logger logger = LoggerFactory.getLogger(InterpreterOutputChangeWatcher.class);

  private WatchService watcher;
  private final List<File> watchFiles = new LinkedList<>();
  private final Map<WatchKey, File> watchKeys = new HashMap<>();
  private InterpreterOutputChangeListener listener;
  // Written by shutdown() from another thread and polled by run(); volatile so
  // the watcher loop observes the stop request without extra synchronization.
  private volatile boolean stop;

  public InterpreterOutputChangeWatcher(InterpreterOutputChangeListener listener)
      throws IOException {
    watcher = FileSystems.getDefault().newWatchService();
    this.listener = listener;
  }

  /**
   * Register a regular file for change notification. WatchService operates on
   * directories, so the file's parent directory is registered and the file
   * name is remembered for filtering events later.
   *
   * @param file regular file to watch
   * @throws IOException if {@code file} is not a regular file or registration fails
   */
  public void watch(File file) throws IOException {
    String dirString;
    if (file.isFile()) {
      dirString = file.getParentFile().getAbsolutePath();
    } else {
      throw new IOException(file.getName() + " is not a file");
    }

    if (dirString == null) {
      dirString = "/";
    }

    Path dir = FileSystems.getDefault().getPath(dirString);
    logger.info("watch " + dir);
    WatchKey key = dir.register(watcher, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY);
    synchronized (watchKeys) {
      watchKeys.put(key, new File(dirString));
      watchFiles.add(file);
    }
  }

  /** Cancel every directory registration and forget all watched files. */
  public void clear() {
    synchronized (watchKeys) {
      for (WatchKey key : watchKeys.keySet()) {
        key.cancel();
      }
      watchKeys.clear();
      watchFiles.clear();
    }
  }

  /** Stop the watcher loop and release the underlying WatchService. */
  public void shutdown() throws IOException {
    stop = true;
    clear();
    watcher.close();
  }

  @Override
  @SuppressWarnings("unchecked")  // WatchEvent<Path> cast is safe for non-OVERFLOW path events
  public void run() {
    while (!stop) {
      WatchKey key = null;
      try {
        // Poll with a timeout so the stop flag is re-checked at least once a second.
        key = watcher.poll(1, TimeUnit.SECONDS);
      } catch (InterruptedException e) {
        // Restore the interrupt status for any outer code before exiting.
        Thread.currentThread().interrupt();
        break;
      } catch (ClosedWatchServiceException e) {
        break;
      }

      if (key == null) {
        continue;
      }

      for (WatchEvent<?> event : key.pollEvents()) {
        WatchEvent.Kind<?> kind = event.kind();
        if (kind == OVERFLOW) {
          continue;
        }
        WatchEvent<Path> ev = (WatchEvent<Path>) event;
        Path filename = ev.context();
        // Only notify for files that were explicitly registered via watch().
        synchronized (watchKeys) {
          for (File f : watchFiles) {
            if (f.getName().compareTo(filename.toString()) == 0) {
              File changedFile;
              if (filename.isAbsolute()) {
                changedFile = new File(filename.toString());
              } else {
                changedFile = new File(watchKeys.get(key), filename.toString());
              }
              logger.info("File change detected " + changedFile.getAbsolutePath());
              if (listener != null) {
                listener.fileChanged(changedFile);
              }
            }
          }
        }
      }

      // Reset the key; if the watched directory became inaccessible, stop watching.
      boolean valid = key.reset();
      if (!valid) {
        break;
      }
    }
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/RemoteZeppelinServerResource.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/RemoteZeppelinServerResource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
/**
* Remote Zeppelin Server Resource
*/
public class RemoteZeppelinServerResource {

  /**
   * Resource Type for Zeppelin Server
   */
  public enum Type {
    PARAGRAPH_RUNNERS
  }

  private String ownerKey;
  private Type resourceType;
  private Object data;

  /** Key identifying the owner of this resource. */
  public String getOwnerKey() {
    return ownerKey;
  }

  public void setOwnerKey(String ownerKey) {
    this.ownerKey = ownerKey;
  }

  /** Kind of payload this resource carries. */
  public Type getResourceType() {
    return resourceType;
  }

  public void setResourceType(Type resourceType) {
    this.resourceType = resourceType;
  }

  /** Opaque payload associated with this resource. */
  public Object getData() {
    return data;
  }

  public void setData(Object data) {
    this.data = data;
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterUtils.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterUtils.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
import java.lang.reflect.InvocationTargetException;
/**
* Interpreter utility functions
*/
public class InterpreterUtils {

  // Utility class: static methods only, no instances.
  private InterpreterUtils() {
  }

  /**
   * Extract the message most useful to show a user. For reflective invocations
   * the interesting message is on the wrapped cause, not on the
   * InvocationTargetException itself; otherwise the exception's own message is
   * returned.
   *
   * @param ex exception to extract a message from (must not be null)
   * @return the cause's message for an InvocationTargetException with a cause,
   *         else {@code ex.getMessage()} (may be null)
   */
  public static String getMostRelevantMessage(Exception ex) {
    if (ex instanceof InvocationTargetException) {
      Throwable cause = ((InvocationTargetException) ex).getCause();
      if (cause != null) {
        return cause.getMessage();
      }
    }
    return ex.getMessage();
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOutput.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOutput.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
/**
* InterpreterOutput is OutputStream that supposed to print content on notebook
* in addition to InterpreterResult which used to return from Interpreter.interpret().
*/
public class InterpreterOutput extends OutputStream {
Logger logger = LoggerFactory.getLogger(InterpreterOutput.class);
private final int NEW_LINE_CHAR = '\n';
private List<InterpreterResultMessageOutput> resultMessageOutputs = new LinkedList<>();
private InterpreterResultMessageOutput currentOut;
private List<String> resourceSearchPaths = Collections.synchronizedList(new LinkedList<String>());
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
private final InterpreterOutputListener flushListener;
private final InterpreterOutputChangeListener changeListener;
private int size = 0;
// change static var to set interpreter output limit
// limit will be applied to all InterpreterOutput object.
// so we can expect the consistent behavior
public static int limit = Constants.ZEPPELIN_INTERPRETER_OUTPUT_LIMIT;
public InterpreterOutput(InterpreterOutputListener flushListener) {
this.flushListener = flushListener;
changeListener = null;
clear();
}
public InterpreterOutput(InterpreterOutputListener flushListener,
InterpreterOutputChangeListener listener)
throws IOException {
this.flushListener = flushListener;
this.changeListener = listener;
clear();
}
public void setType(InterpreterResult.Type type) throws IOException {
InterpreterResultMessageOutput out = null;
synchronized (resultMessageOutputs) {
int index = resultMessageOutputs.size();
InterpreterResultMessageOutputListener listener =
createInterpreterResultMessageOutputListener(index);
if (changeListener == null) {
out = new InterpreterResultMessageOutput(type, listener);
} else {
out = new InterpreterResultMessageOutput(type, listener, changeListener);
}
out.setResourceSearchPaths(resourceSearchPaths);
buffer.reset();
size = 0;
if (currentOut != null) {
currentOut.flush();
}
resultMessageOutputs.add(out);
currentOut = out;
}
}
public InterpreterResultMessageOutputListener createInterpreterResultMessageOutputListener(
final int index) {
return new InterpreterResultMessageOutputListener() {
final int idx = index;
@Override
public void onAppend(InterpreterResultMessageOutput out, byte[] line) {
if (flushListener != null) {
flushListener.onAppend(idx, out, line);
}
}
@Override
public void onUpdate(InterpreterResultMessageOutput out) {
if (flushListener != null) {
flushListener.onUpdate(idx, out);
}
}
};
}
public InterpreterResultMessageOutput getCurrentOutput() {
synchronized (resultMessageOutputs) {
return currentOut;
}
}
public InterpreterResultMessageOutput getOutputAt(int index) {
synchronized (resultMessageOutputs) {
return resultMessageOutputs.get(index);
}
}
public int size() {
synchronized (resultMessageOutputs) {
return resultMessageOutputs.size();
}
}
public void clear() {
size = 0;
truncated = false;
buffer.reset();
synchronized (resultMessageOutputs) {
for (InterpreterResultMessageOutput out : resultMessageOutputs) {
out.clear();
try {
out.close();
} catch (IOException e) {
logger.error(e.getMessage(), e);
}
}
// clear all ResultMessages
resultMessageOutputs.clear();
currentOut = null;
startOfTheNewLine = true;
firstCharIsPercentSign = false;
updateAllResultMessages();
}
}
private void updateAllResultMessages() {
if (flushListener != null) {
flushListener.onUpdateAll(this);
}
}
int previousChar = 0;
boolean startOfTheNewLine = true;
boolean firstCharIsPercentSign = false;
boolean truncated = false;
@Override
public void write(int b) throws IOException {
InterpreterResultMessageOutput out;
if (truncated) {
return;
}
synchronized (resultMessageOutputs) {
currentOut = getCurrentOutput();
if (++size > limit) {
if (b == NEW_LINE_CHAR && currentOut != null) {
InterpreterResult.Type type = currentOut.getType();
if (type == InterpreterResult.Type.TEXT || type == InterpreterResult.Type.TABLE) {
setType(InterpreterResult.Type.TEXT);
getCurrentOutput().write("Output exceeds " + limit + ". Truncated.\n");
truncated = true;
return;
}
}
}
if (startOfTheNewLine) {
if (b == '%') {
startOfTheNewLine = false;
firstCharIsPercentSign = true;
buffer.write(b);
previousChar = b;
return;
} else if (b != NEW_LINE_CHAR) {
startOfTheNewLine = false;
}
}
if (b == NEW_LINE_CHAR) {
if (currentOut != null && currentOut.getType() == InterpreterResult.Type.TABLE) {
if (previousChar == NEW_LINE_CHAR) {
startOfTheNewLine = true;
return;
}
} else {
startOfTheNewLine = true;
}
}
boolean flushBuffer = false;
if (firstCharIsPercentSign) {
if (b == ' ' || b == NEW_LINE_CHAR || b == '\t') {
firstCharIsPercentSign = false;
String displaySystem = buffer.toString();
for (InterpreterResult.Type type : InterpreterResult.Type.values()) {
if (displaySystem.equals('%' + type.name().toLowerCase())) {
// new type detected
setType(type);
previousChar = b;
return;
}
}
// not a defined display system
flushBuffer = true;
} else {
buffer.write(b);
previousChar = b;
return;
}
}
out = getCurrentOutputForWriting();
if (flushBuffer) {
out.write(buffer.toByteArray());
buffer.reset();
}
out.write(b);
previousChar = b;
}
}
private InterpreterResultMessageOutput getCurrentOutputForWriting() throws IOException {
synchronized (resultMessageOutputs) {
InterpreterResultMessageOutput out = getCurrentOutput();
if (out == null) {
// add text type result message
setType(InterpreterResult.Type.TEXT);
out = getCurrentOutput();
}
return out;
}
}
@Override
public void write(byte [] b) throws IOException {
write(b, 0, b.length);
}
@Override
public void write(byte [] b, int off, int len) throws IOException {
for (int i = off; i < len; i++) {
write(b[i]);
}
}
/**
* In dev mode, it monitors file and update ZeppelinServer
* @param file
* @throws IOException
*/
public void write(File file) throws IOException {
InterpreterResultMessageOutput out = getCurrentOutputForWriting();
out.write(file);
}
public void write(String string) throws IOException {
write(string.getBytes());
}
/**
* write contents in the resource file in the classpath
* @param url
* @throws IOException
*/
public void write(URL url) throws IOException {
InterpreterResultMessageOutput out = getCurrentOutputForWriting();
out.write(url);
}
public void addResourceSearchPath(String path) {
resourceSearchPaths.add(path);
}
public void writeResource(String resourceName) throws IOException {
InterpreterResultMessageOutput out = getCurrentOutputForWriting();
out.writeResource(resourceName);
}
public List<InterpreterResultMessage> toInterpreterResultMessage() throws IOException {
List<InterpreterResultMessage> list = new LinkedList<>();
synchronized (resultMessageOutputs) {
for (InterpreterResultMessageOutput out : resultMessageOutputs) {
list.add(out.toInterpreterResultMessage());
}
}
return list;
}
public void flush() throws IOException {
InterpreterResultMessageOutput out = getCurrentOutput();
if (out != null) {
out.flush();
}
}
public byte[] toByteArray() throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
synchronized (resultMessageOutputs) {
for (InterpreterResultMessageOutput m : resultMessageOutputs) {
out.write(m.toByteArray());
}
}
return out.toByteArray();
}
@Override
public void close() throws IOException {
synchronized (resultMessageOutputs) {
for (InterpreterResultMessageOutput out : resultMessageOutputs) {
out.close();
}
}
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterResultMessage.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterResultMessage.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
/**
* Interpreter result message
*/
public class InterpreterResultMessage {
  InterpreterResult.Type type;
  String data;

  /**
   * @param type display type of this message
   * @param data rendered payload of the message
   */
  public InterpreterResultMessage(InterpreterResult.Type type, String data) {
    this.type = type;
    this.data = data;
  }

  public InterpreterResult.Type getType() {
    return type;
  }

  public String getData() {
    return data;
  }

  /** Renders this message as a "%type data" directive string. */
  @Override
  public String toString() {
    String directive = "%" + type.name().toLowerCase();
    return directive + " " + data;
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterPropertyBuilder.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterPropertyBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
import java.util.HashMap;
import java.util.Map;
/**
* InterpreterPropertyBuilder
*/
public class InterpreterPropertyBuilder {
  Map<String, InterpreterProperty> properties = new HashMap<>();

  /** Register a property that has only a default value and a description. */
  public InterpreterPropertyBuilder add(String name, String defaultValue, String description){
    InterpreterProperty property = new InterpreterProperty(defaultValue, description);
    properties.put(name, property);
    return this;
  }

  /**
   * Register a property that may also be resolved from an environment variable
   * or a JVM system property.
   */
  public InterpreterPropertyBuilder add(String name, String envName, String propertyName,
      String defaultValue, String description){
    InterpreterProperty property =
        new InterpreterProperty(envName, propertyName, defaultValue, description);
    properties.put(name, property);
    return this;
  }

  /** Returns the accumulated name-to-property map. */
  public Map<String, InterpreterProperty> build(){
    return properties;
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/Constants.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/Constants.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
/**
* Interpreter related constants
*
*
*/
public class Constants {
  public static final String ZEPPELIN_INTERPRETER_PORT = "zeppelin.interpreter.port";
  public static final String ZEPPELIN_INTERPRETER_HOST = "zeppelin.interpreter.host";

  public static final String EXISTING_PROCESS = "existing_process";

  /** Default port for the remote interpreter process. */
  public static final int ZEPPELIN_INTERPRETER_DEFAULT_PORT = 29914;

  /**
   * @deprecated misspelled name kept for backward compatibility;
   *             use {@link #ZEPPELIN_INTERPRETER_DEFAULT_PORT} instead.
   */
  @Deprecated
  public static final int ZEPPELIN_INTERPRETER_DEFAUlT_PORT = ZEPPELIN_INTERPRETER_DEFAULT_PORT;

  /** Default per-paragraph output cap in bytes (100 KiB). */
  public static final int ZEPPELIN_INTERPRETER_OUTPUT_LIMIT = 1024 * 100;
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterContext.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterContext.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
import java.util.List;
import java.util.Map;
import org.apache.zeppelin.display.AngularObjectRegistry;
import org.apache.zeppelin.user.AuthenticationInfo;
import org.apache.zeppelin.display.GUI;
import org.apache.zeppelin.interpreter.remote.RemoteEventClientWrapper;
import org.apache.zeppelin.interpreter.remote.RemoteEventClient;
import org.apache.zeppelin.interpreter.remote.RemoteInterpreterEventClient;
import org.apache.zeppelin.resource.ResourcePool;
/**
* Interpreter context
*/
public class InterpreterContext {
  private static final ThreadLocal<InterpreterContext> threadIC = new ThreadLocal<>();

  /** Output stream for the paragraph this context belongs to. */
  public final InterpreterOutput out;

  /** Returns the context bound to the current thread, or null if none was set. */
  public static InterpreterContext get() {
    return threadIC.get();
  }

  /** Bind a context to the current thread. */
  public static void set(InterpreterContext ic) {
    threadIC.set(ic);
  }

  /** Unbind the current thread's context. */
  public static void remove() {
    threadIC.remove();
  }

  // Immutable per-paragraph data; all of these are assigned exactly once in
  // the main constructor and have no setters, so they are declared final.
  private final String noteId;
  private final String replName;
  private final String paragraphTitle;
  private final String paragraphId;
  private final String paragraphText;
  private final AuthenticationInfo authenticationInfo;
  private final Map<String, Object> config;
  private final GUI gui;
  private final AngularObjectRegistry angularObjectRegistry;
  private final ResourcePool resourcePool;
  private final List<InterpreterContextRunner> runners;

  // Mutable: set after construction via setters / the event-client constructor.
  private String className;
  private RemoteEventClientWrapper client;
  private RemoteWorksController remoteWorksController;

  public InterpreterContext(String noteId,
                            String paragraphId,
                            String replName,
                            String paragraphTitle,
                            String paragraphText,
                            AuthenticationInfo authenticationInfo,
                            Map<String, Object> config,
                            GUI gui,
                            AngularObjectRegistry angularObjectRegistry,
                            ResourcePool resourcePool,
                            List<InterpreterContextRunner> runners,
                            InterpreterOutput out
  ) {
    this(noteId, paragraphId, replName, paragraphTitle, paragraphText, authenticationInfo,
        config, gui, angularObjectRegistry, resourcePool, runners, out, null);
  }

  public InterpreterContext(String noteId,
                            String paragraphId,
                            String replName,
                            String paragraphTitle,
                            String paragraphText,
                            AuthenticationInfo authenticationInfo,
                            Map<String, Object> config,
                            GUI gui,
                            AngularObjectRegistry angularObjectRegistry,
                            ResourcePool resourcePool,
                            List<InterpreterContextRunner> runners,
                            InterpreterOutput out,
                            RemoteWorksController remoteWorksController
  ) {
    this.noteId = noteId;
    this.paragraphId = paragraphId;
    this.replName = replName;
    this.paragraphTitle = paragraphTitle;
    this.paragraphText = paragraphText;
    this.authenticationInfo = authenticationInfo;
    this.config = config;
    this.gui = gui;
    this.angularObjectRegistry = angularObjectRegistry;
    this.resourcePool = resourcePool;
    this.runners = runners;
    this.out = out;
    this.remoteWorksController = remoteWorksController;
  }

  public InterpreterContext(String noteId,
                            String paragraphId,
                            String replName,
                            String paragraphTitle,
                            String paragraphText,
                            AuthenticationInfo authenticationInfo,
                            Map<String, Object> config,
                            GUI gui,
                            AngularObjectRegistry angularObjectRegistry,
                            ResourcePool resourcePool,
                            List<InterpreterContextRunner> contextRunners,
                            InterpreterOutput output,
                            RemoteWorksController remoteWorksController,
                            RemoteInterpreterEventClient eventClient) {
    this(noteId, paragraphId, replName, paragraphTitle, paragraphText, authenticationInfo,
        config, gui, angularObjectRegistry, resourcePool, contextRunners, output,
        remoteWorksController);
    // Wrap the raw event client so interpreters only see the wrapper interface.
    this.client = new RemoteEventClient(eventClient);
  }

  public String getNoteId() {
    return noteId;
  }

  public String getReplName() {
    return replName;
  }

  public String getParagraphId() {
    return paragraphId;
  }

  public String getParagraphText() {
    return paragraphText;
  }

  public String getParagraphTitle() {
    return paragraphTitle;
  }

  public AuthenticationInfo getAuthenticationInfo() {
    return authenticationInfo;
  }

  public Map<String, Object> getConfig() {
    return config;
  }

  public GUI getGui() {
    return gui;
  }

  public AngularObjectRegistry getAngularObjectRegistry() {
    return angularObjectRegistry;
  }

  public ResourcePool getResourcePool() {
    return resourcePool;
  }

  public List<InterpreterContextRunner> getRunners() {
    return runners;
  }

  public String getClassName() {
    return className;
  }

  public void setClassName(String className) {
    this.className = className;
  }

  public RemoteEventClientWrapper getClient() {
    return client;
  }

  public RemoteWorksController getRemoteWorksController() {
    return remoteWorksController;
  }

  public void setRemoteWorksController(RemoteWorksController remoteWorksController) {
    this.remoteWorksController = remoteWorksController;
  }

  public InterpreterOutput out() {
    return out;
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterResultMessageOutput.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterResultMessageOutput.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.net.URL;
import java.util.LinkedList;
import java.util.List;
/**
* InterpreterMessageOutputStream
*/
public class InterpreterResultMessageOutput extends OutputStream {
  Logger logger = LoggerFactory.getLogger(InterpreterResultMessageOutput.class);
  private static final int NEW_LINE_CHAR = '\n';
  private List<String> resourceSearchPaths;

  // Bytes written since the last newline/flush; promoted into outList by flush(boolean).
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();

  // Ordered output fragments: byte[] chunks, File or URL resources.
  // All access is guarded by synchronizing on outList itself.
  private final List<Object> outList = new LinkedList<>();
  private InterpreterOutputChangeWatcher watcher;
  private final InterpreterResultMessageOutputListener flushListener;
  private InterpreterResult.Type type = InterpreterResult.Type.TEXT;
  private boolean firstWrite = true;

  public InterpreterResultMessageOutput(
      InterpreterResult.Type type,
      InterpreterResultMessageOutputListener listener) {
    this.type = type;
    this.flushListener = listener;
  }

  /**
   * Same as the two-argument constructor, but additionally watches files written
   * via {@link #write(File)} and notifies {@code listener} when they change
   * (dev-mode support).
   */
  public InterpreterResultMessageOutput(
      InterpreterResult.Type type,
      InterpreterResultMessageOutputListener flushListener,
      InterpreterOutputChangeListener listener) throws IOException {
    this.type = type;
    this.flushListener = flushListener;
    watcher = new InterpreterOutputChangeWatcher(listener);
    watcher.start();
  }

  public InterpreterResult.Type getType() {
    return type;
  }

  /** Switching to a different type discards everything buffered so far. */
  public void setType(InterpreterResult.Type type) {
    if (this.type != type) {
      clear();
      this.type = type;
    }
  }

  /** Drops all buffered output, stops watching files, and notifies the listener. */
  public void clear() {
    synchronized (outList) {
      buffer.reset();
      outList.clear();
      if (watcher != null) {
        watcher.clear();
      }
      if (flushListener != null) {
        flushListener.onUpdate(this);
      }
    }
  }

  @Override
  public void write(int b) throws IOException {
    synchronized (outList) {
      buffer.write(b);
      if (b == NEW_LINE_CHAR) {
        // first time use of this outputstream.
        if (firstWrite) {
          // clear the output on gui
          if (flushListener != null) {
            flushListener.onUpdate(this);
          }
          firstWrite = false;
        }
        // Only TEXT output can be streamed line-by-line; other types are
        // accumulated until an explicit flush.
        if (isAppendSupported()) {
          flush(true);
        }
      }
    }
  }

  @Override
  public void write(byte[] b) throws IOException {
    write(b, 0, b.length);
  }

  @Override
  public void write(byte[] b, int off, int len) throws IOException {
    synchronized (outList) {
      // Fix: iterate over [off, off + len). The original bound (i < len)
      // silently dropped or mis-selected bytes whenever off > 0.
      for (int i = off; i < off + len; i++) {
        write(b[i]);
      }
    }
  }

  /**
   * In dev mode, it monitors file and update ZeppelinServer
   * @param file
   * @throws IOException
   */
  public void write(File file) throws IOException {
    synchronized (outList) {
      outList.add(file);
    }
    // watch() is invoked outside the lock to avoid holding it across alien code.
    if (watcher != null) {
      watcher.watch(file);
    }
  }

  public void write(String string) throws IOException {
    write(string.getBytes());
  }

  /**
   * write contents in the resource file in the classpath
   * @param url
   * @throws IOException
   */
  public void write(URL url) throws IOException {
    synchronized (outList) {
      outList.add(url);
    }
  }

  public void setResourceSearchPaths(List<String> resourceSearchPaths) {
    this.resourceSearchPaths = resourceSearchPaths;
  }

  /**
   * Writes the named resource, preferring files under the configured search
   * paths (dev mode) and falling back to the classpath.
   */
  public void writeResource(String resourceName) throws IOException {
    // search file under provided paths first, for dev mode
    if (resourceSearchPaths != null) {
      for (String path : resourceSearchPaths) {
        File res = new File(path + "/" + resourceName);
        if (res.isFile()) {
          write(res);
          return;
        }
      }
    }
    // search from classpath
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    if (cl == null) {
      cl = this.getClass().getClassLoader();
    }
    if (cl == null) {
      cl = ClassLoader.getSystemClassLoader();
    }
    write(cl.getResource(resourceName));
  }

  /**
   * Materializes all flushed fragments (byte arrays, files, URLs) into a single
   * byte array. Bytes still sitting in the line buffer are NOT included — they
   * only become visible after a flush (original behavior, preserved).
   */
  public byte[] toByteArray() throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    List<Object> all = new LinkedList<>();
    // Snapshot under the lock, then read files/URLs outside it.
    synchronized (outList) {
      all.addAll(outList);
    }
    for (Object o : all) {
      if (o instanceof File) {
        // try-with-resources so the stream is closed even if copyStream throws.
        try (InputStream fin = new FileInputStream((File) o)) {
          copyStream(fin, out);
        }
      } else if (o instanceof byte[]) {
        out.write((byte[]) o);
      } else if (o instanceof Integer) {
        out.write((int) o);
      } else if (o instanceof URL) {
        try (InputStream fin = ((URL) o).openStream()) {
          copyStream(fin, out);
        }
      } else {
        // can not handle the object — silently skipped (original behavior)
      }
    }
    out.close();
    return out.toByteArray();
  }

  public InterpreterResultMessage toInterpreterResultMessage() throws IOException {
    return new InterpreterResultMessage(type, new String(toByteArray()));
  }

  /**
   * Moves the line buffer into outList and notifies the listener — onAppend
   * when {@code append} is true, onUpdate otherwise.
   */
  private void flush(boolean append) throws IOException {
    synchronized (outList) {
      buffer.flush();
      byte[] bytes = buffer.toByteArray();
      if (bytes != null && bytes.length > 0) {
        outList.add(bytes);
        if (append) {
          if (flushListener != null) {
            flushListener.onAppend(this, bytes);
          }
        } else {
          if (flushListener != null) {
            flushListener.onUpdate(this);
          }
        }
      }
      buffer.reset();
    }
  }

  public void flush() throws IOException {
    flush(isAppendSupported());
  }

  /** Only plain TEXT output supports incremental (append) delivery. */
  public boolean isAppendSupported() {
    return type == InterpreterResult.Type.TEXT;
  }

  private void copyStream(InputStream in, OutputStream out) throws IOException {
    int bufferSize = 8192;
    byte[] buffer = new byte[bufferSize];
    while (true) {
      int bytesRead = in.read(buffer);
      if (bytesRead == -1) {
        break;
      } else {
        out.write(buffer, 0, bytesRead);
      }
    }
  }

  @Override
  public void close() throws IOException {
    flush();
    if (watcher != null) {
      watcher.clear();
      watcher.shutdown();
    }
  }

  /** Zeppelin wire form: "%<type> <content>"; falls back to an empty body on I/O error. */
  public String toString() {
    try {
      return "%" + type.name().toLowerCase() + " " + new String(toByteArray());
    } catch (IOException e) {
      logger.error(e.getMessage(), e);
      return "%" + type.name().toLowerCase() + "\n";
    }
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/util/InterpreterOutputStream.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/util/InterpreterOutputStream.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.util;
import org.apache.zeppelin.interpreter.InterpreterOutput;
import org.slf4j.Logger;
import java.io.IOException;
/**
* Output Stream integrated with InterpreterOutput.
*
* Can be used to channel output from interpreters.
*/
public class InterpreterOutputStream extends LogOutputStream {
  // NOTE(review): static field assigned per-instance in the constructor, so the
  // most recently constructed stream's logger wins for ALL instances. Looks
  // intentional in Zeppelin, but confirm before relying on it.
  public static Logger logger;
  // Destination for interpreter output; may be null, in which case bytes only
  // reach the LogOutputStream line buffer (and hence processLine/logging).
  InterpreterOutput interpreterOutput;
  public InterpreterOutputStream(Logger logger) {
    this.logger = logger;
  }
  public InterpreterOutput getInterpreterOutput() {
    return interpreterOutput;
  }
  public void setInterpreterOutput(InterpreterOutput interpreterOutput) {
    this.interpreterOutput = interpreterOutput;
  }
  // Each write is tee'd: first into the superclass line buffer, then into the
  // InterpreterOutput (when one is attached).
  @Override
  public void write(int b) throws IOException {
    super.write(b);
    if (interpreterOutput != null) {
      interpreterOutput.write(b);
    }
  }
  @Override
  public void write(byte [] b) throws IOException {
    super.write(b);
    if (interpreterOutput != null) {
      interpreterOutput.write(b);
    }
  }
  @Override
  public void write(byte [] b, int offset, int len) throws IOException {
    super.write(b, offset, len);
    if (interpreterOutput != null) {
      interpreterOutput.write(b, offset, len);
    }
  }
  // Completed lines from LogOutputStream are mirrored to the debug log.
  @Override
  protected void processLine(String s, int i) {
    logger.debug("Interpreter output:" + s);
  }
  // close/flush propagate to both the superclass buffer and the attached output.
  @Override
  public void close() throws IOException {
    super.close();
    if (interpreterOutput != null) {
      interpreterOutput.close();
    }
  }
  @Override
  public void flush() throws IOException {
    super.flush();
    if (interpreterOutput != null) {
      interpreterOutput.flush();
    }
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/util/LogOutputStream.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/util/LogOutputStream.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.util;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
/**
* Minor modification of LogOutputStream of apache commons exec.
* LogOutputStream of apache commons exec has one issue that method flush doesn't throw IOException,
* so that SparkOutputStream can not extend it correctly.
*/
public abstract class LogOutputStream extends OutputStream {
  /** Initial capacity of the line buffer (value inherited from commons-exec). */
  private static final int INITIAL_SIZE = 132;
  /** Carriage return ('\r'). */
  private static final int CR = 13;
  /** Line feed ('\n'). */
  private static final int LF = 10;

  /** Accumulates the bytes of the current, not-yet-terminated line. */
  private final ByteArrayOutputStream buffer;
  /** True when the previous byte was CR, so the LF of a CRLF pair is swallowed. */
  private boolean skip;
  /** Level handed to {@link #processLine(String, int)}; 999 by default. */
  private final int level;

  public LogOutputStream() {
    this(999);
  }

  public LogOutputStream(int level) {
    // Fix: use the named constants instead of repeating the magic literals
    // (INTIAL_SIZE/CR/LF were previously declared but never used).
    this.buffer = new ByteArrayOutputStream(INITIAL_SIZE);
    this.skip = false;
    this.level = level;
  }

  /**
   * Buffers one byte; CR or LF terminates the current line and triggers
   * {@link #processBuffer()}. A LF directly following a CR is ignored.
   */
  @Override
  public void write(int cc) throws IOException {
    byte c = (byte) cc;
    if (c != LF && c != CR) {
      this.buffer.write(cc);
    } else if (!this.skip) {
      this.processBuffer();
    }
    // Remember whether this byte was a CR so a following LF does not produce
    // an extra empty line.
    this.skip = c == CR;
  }

  /** Unlike commons-exec's version, declares IOException (see class javadoc). */
  @Override
  public void flush() throws IOException {
    if (this.buffer.size() > 0) {
      this.processBuffer();
    }
  }

  @Override
  public void close() throws IOException {
    if (this.buffer.size() > 0) {
      this.processBuffer();
    }
    super.close();
  }

  public int getMessageLevel() {
    return this.level;
  }

  /**
   * Writes a byte range by alternating between bulk-buffering runs of ordinary
   * bytes and routing each terminator byte through {@link #write(int)}, so all
   * line/CRLF accounting stays in one place.
   */
  @Override
  public void write(byte[] b, int off, int len) throws IOException {
    int offset = off;
    int blockStartOffset = off;
    for (int remaining = len; remaining > 0; blockStartOffset = offset) {
      while (remaining > 0 && b[offset] != LF && b[offset] != CR) {
        ++offset;
        --remaining;
      }
      int blockLength = offset - blockStartOffset;
      if (blockLength > 0) {
        this.buffer.write(b, blockStartOffset, blockLength);
      }
      while (remaining > 0 && (b[offset] == LF || b[offset] == CR)) {
        this.write(b[offset]);
        ++offset;
        --remaining;
      }
    }
  }

  /** Emits the buffered line and resets the buffer. */
  protected void processBuffer() {
    // NOTE: toString() uses the platform default charset, matching commons-exec.
    this.processLine(this.buffer.toString());
    this.buffer.reset();
  }

  protected void processLine(String line) {
    this.processLine(line, this.level);
  }

  /** Subclasses receive each completed line together with the configured level. */
  protected abstract void processLine(String var1, int var2);
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterResultMessage.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterResultMessage.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.zeppelin.interpreter.thrift;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2016-11-29")
// Thrift-generated struct: one interpreter result message (a type tag plus its
// payload). Generated code — keep logic untouched; comments below are
// orientation only.
public class RemoteInterpreterResultMessage implements org.apache.thrift.TBase<RemoteInterpreterResultMessage, RemoteInterpreterResultMessage._Fields>, java.io.Serializable, Cloneable, Comparable<RemoteInterpreterResultMessage> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("RemoteInterpreterResultMessage");
  private static final org.apache.thrift.protocol.TField TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("type", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField DATA_FIELD_DESC = new org.apache.thrift.protocol.TField("data", org.apache.thrift.protocol.TType.STRING, (short)2);
  // Two wire encodings: standard (field-tagged) and tuple (compact, bitset-prefixed).
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new RemoteInterpreterResultMessageStandardSchemeFactory());
    schemes.put(TupleScheme.class, new RemoteInterpreterResultMessageTupleSchemeFactory());
  }
  public String type; // required
  public String data; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    TYPE((short)1, "type"),
    DATA((short)2, "data");
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // TYPE
          return TYPE;
        case 2: // DATA
          return DATA;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final String _fieldName;
    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  // Field metadata registered with Thrift's global FieldMetaData map (used by reflection-based tooling).
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.TYPE, new org.apache.thrift.meta_data.FieldMetaData("type", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.DATA, new org.apache.thrift.meta_data.FieldMetaData("data", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(RemoteInterpreterResultMessage.class, metaDataMap);
  }
  public RemoteInterpreterResultMessage() {
  }
  public RemoteInterpreterResultMessage(
    String type,
    String data)
  {
    this();
    this.type = type;
    this.data = data;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public RemoteInterpreterResultMessage(RemoteInterpreterResultMessage other) {
    if (other.isSetType()) {
      this.type = other.type;
    }
    if (other.isSetData()) {
      this.data = other.data;
    }
  }
  public RemoteInterpreterResultMessage deepCopy() {
    return new RemoteInterpreterResultMessage(this);
  }
  @Override
  public void clear() {
    this.type = null;
    this.data = null;
  }
  public String getType() {
    return this.type;
  }
  public RemoteInterpreterResultMessage setType(String type) {
    this.type = type;
    return this;
  }
  public void unsetType() {
    this.type = null;
  }
  /** Returns true if field type is set (has been assigned a value) and false otherwise */
  public boolean isSetType() {
    return this.type != null;
  }
  public void setTypeIsSet(boolean value) {
    if (!value) {
      this.type = null;
    }
  }
  public String getData() {
    return this.data;
  }
  public RemoteInterpreterResultMessage setData(String data) {
    this.data = data;
    return this;
  }
  public void unsetData() {
    this.data = null;
  }
  /** Returns true if field data is set (has been assigned a value) and false otherwise */
  public boolean isSetData() {
    return this.data != null;
  }
  public void setDataIsSet(boolean value) {
    if (!value) {
      this.data = null;
    }
  }
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case TYPE:
      if (value == null) {
        unsetType();
      } else {
        setType((String)value);
      }
      break;
    case DATA:
      if (value == null) {
        unsetData();
      } else {
        setData((String)value);
      }
      break;
    }
  }
  public Object getFieldValue(_Fields field) {
    switch (field) {
    case TYPE:
      return getType();
    case DATA:
      return getData();
    }
    throw new IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case TYPE:
      return isSetType();
    case DATA:
      return isSetData();
    }
    throw new IllegalStateException();
  }
  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof RemoteInterpreterResultMessage)
      return this.equals((RemoteInterpreterResultMessage)that);
    return false;
  }
  public boolean equals(RemoteInterpreterResultMessage that) {
    if (that == null)
      return false;
    boolean this_present_type = true && this.isSetType();
    boolean that_present_type = true && that.isSetType();
    if (this_present_type || that_present_type) {
      if (!(this_present_type && that_present_type))
        return false;
      if (!this.type.equals(that.type))
        return false;
    }
    boolean this_present_data = true && this.isSetData();
    boolean that_present_data = true && that.isSetData();
    if (this_present_data || that_present_data) {
      if (!(this_present_data && that_present_data))
        return false;
      if (!this.data.equals(that.data))
        return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    List<Object> list = new ArrayList<Object>();
    boolean present_type = true && (isSetType());
    list.add(present_type);
    if (present_type)
      list.add(type);
    boolean present_data = true && (isSetData());
    list.add(present_data);
    if (present_data)
      list.add(data);
    return list.hashCode();
  }
  @Override
  public int compareTo(RemoteInterpreterResultMessage other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = Boolean.valueOf(isSetType()).compareTo(other.isSetType());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetType()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.type, other.type);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetData()).compareTo(other.isSetData());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetData()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.data, other.data);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("RemoteInterpreterResultMessage(");
    boolean first = true;
    sb.append("type:");
    if (this.type == null) {
      sb.append("null");
    } else {
      sb.append(this.type);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("data:");
    if (this.data == null) {
      sb.append("null");
    } else {
      sb.append(this.data);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  // Java serialization is bridged onto the Thrift compact protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class RemoteInterpreterResultMessageStandardSchemeFactory implements SchemeFactory {
    public RemoteInterpreterResultMessageStandardScheme getScheme() {
      return new RemoteInterpreterResultMessageStandardScheme();
    }
  }
  // Standard protocol: reads field tags until STOP, skipping unknown fields for
  // forward compatibility.
  private static class RemoteInterpreterResultMessageStandardScheme extends StandardScheme<RemoteInterpreterResultMessage> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, RemoteInterpreterResultMessage struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // TYPE
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.type = iprot.readString();
              struct.setTypeIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // DATA
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.data = iprot.readString();
              struct.setDataIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, RemoteInterpreterResultMessage struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.type != null) {
        oprot.writeFieldBegin(TYPE_FIELD_DESC);
        oprot.writeString(struct.type);
        oprot.writeFieldEnd();
      }
      if (struct.data != null) {
        oprot.writeFieldBegin(DATA_FIELD_DESC);
        oprot.writeString(struct.data);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class RemoteInterpreterResultMessageTupleSchemeFactory implements SchemeFactory {
    public RemoteInterpreterResultMessageTupleScheme getScheme() {
      return new RemoteInterpreterResultMessageTupleScheme();
    }
  }
  // Tuple protocol: a 2-bit presence bitset followed by the set fields in order.
  private static class RemoteInterpreterResultMessageTupleScheme extends TupleScheme<RemoteInterpreterResultMessage> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, RemoteInterpreterResultMessage struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetType()) {
        optionals.set(0);
      }
      if (struct.isSetData()) {
        optionals.set(1);
      }
      oprot.writeBitSet(optionals, 2);
      if (struct.isSetType()) {
        oprot.writeString(struct.type);
      }
      if (struct.isSetData()) {
        oprot.writeString(struct.data);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, RemoteInterpreterResultMessage struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(2);
      if (incoming.get(0)) {
        struct.type = iprot.readString();
        struct.setTypeIsSet(true);
      }
      if (incoming.get(1)) {
        struct.data = iprot.readString();
        struct.setDataIsSet(true);
      }
    }
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/InterpreterCompletion.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/InterpreterCompletion.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.zeppelin.interpreter.thrift;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2016-11-29")
public class InterpreterCompletion implements org.apache.thrift.TBase<InterpreterCompletion, InterpreterCompletion._Fields>, java.io.Serializable, Cloneable, Comparable<InterpreterCompletion> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("InterpreterCompletion");
private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)1);
private static final org.apache.thrift.protocol.TField VALUE_FIELD_DESC = new org.apache.thrift.protocol.TField("value", org.apache.thrift.protocol.TType.STRING, (short)2);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new InterpreterCompletionStandardSchemeFactory());
schemes.put(TupleScheme.class, new InterpreterCompletionTupleSchemeFactory());
}
public String name; // required
public String value; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
NAME((short)1, "name"),
VALUE((short)2, "value");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // NAME
return NAME;
case 2: // VALUE
return VALUE;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.NAME, new org.apache.thrift.meta_data.FieldMetaData("name", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.VALUE, new org.apache.thrift.meta_data.FieldMetaData("value", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(InterpreterCompletion.class, metaDataMap);
}
public InterpreterCompletion() {
}
public InterpreterCompletion(
String name,
String value)
{
this();
this.name = name;
this.value = value;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public InterpreterCompletion(InterpreterCompletion other) {
if (other.isSetName()) {
this.name = other.name;
}
if (other.isSetValue()) {
this.value = other.value;
}
}
public InterpreterCompletion deepCopy() {
return new InterpreterCompletion(this);
}
@Override
public void clear() {
this.name = null;
this.value = null;
}
public String getName() {
return this.name;
}
public InterpreterCompletion setName(String name) {
this.name = name;
return this;
}
public void unsetName() {
this.name = null;
}
/** Returns true if field name is set (has been assigned a value) and false otherwise */
public boolean isSetName() {
return this.name != null;
}
public void setNameIsSet(boolean value) {
if (!value) {
this.name = null;
}
}
public String getValue() {
return this.value;
}
public InterpreterCompletion setValue(String value) {
this.value = value;
return this;
}
public void unsetValue() {
this.value = null;
}
/** Returns true if field value is set (has been assigned a value) and false otherwise */
public boolean isSetValue() {
return this.value != null;
}
public void setValueIsSet(boolean value) {
if (!value) {
this.value = null;
}
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case NAME:
if (value == null) {
unsetName();
} else {
setName((String)value);
}
break;
case VALUE:
if (value == null) {
unsetValue();
} else {
setValue((String)value);
}
break;
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
case NAME:
return getName();
case VALUE:
return getValue();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case NAME:
return isSetName();
case VALUE:
return isSetValue();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof InterpreterCompletion)
return this.equals((InterpreterCompletion)that);
return false;
}
public boolean equals(InterpreterCompletion that) {
if (that == null)
return false;
boolean this_present_name = true && this.isSetName();
boolean that_present_name = true && that.isSetName();
if (this_present_name || that_present_name) {
if (!(this_present_name && that_present_name))
return false;
if (!this.name.equals(that.name))
return false;
}
boolean this_present_value = true && this.isSetValue();
boolean that_present_value = true && that.isSetValue();
if (this_present_value || that_present_value) {
if (!(this_present_value && that_present_value))
return false;
if (!this.value.equals(that.value))
return false;
}
return true;
}
@Override
public int hashCode() {
List<Object> list = new ArrayList<Object>();
boolean present_name = true && (isSetName());
list.add(present_name);
if (present_name)
list.add(name);
boolean present_value = true && (isSetValue());
list.add(present_value);
if (present_value)
list.add(value);
return list.hashCode();
}
@Override
public int compareTo(InterpreterCompletion other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetName()).compareTo(other.isSetName());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetName()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.name, other.name);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetValue()).compareTo(other.isSetValue());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetValue()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.value, other.value);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("InterpreterCompletion(");
boolean first = true;
sb.append("name:");
if (this.name == null) {
sb.append("null");
} else {
sb.append(this.name);
}
first = false;
if (!first) sb.append(", ");
sb.append("value:");
if (this.value == null) {
sb.append("null");
} else {
sb.append(this.value);
}
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class InterpreterCompletionStandardSchemeFactory implements SchemeFactory {
public InterpreterCompletionStandardScheme getScheme() {
return new InterpreterCompletionStandardScheme();
}
}
private static class InterpreterCompletionStandardScheme extends StandardScheme<InterpreterCompletion> {
public void read(org.apache.thrift.protocol.TProtocol iprot, InterpreterCompletion struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // NAME
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.name = iprot.readString();
struct.setNameIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // VALUE
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.value = iprot.readString();
struct.setValueIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, InterpreterCompletion struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.name != null) {
oprot.writeFieldBegin(NAME_FIELD_DESC);
oprot.writeString(struct.name);
oprot.writeFieldEnd();
}
if (struct.value != null) {
oprot.writeFieldBegin(VALUE_FIELD_DESC);
oprot.writeString(struct.value);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class InterpreterCompletionTupleSchemeFactory implements SchemeFactory {
public InterpreterCompletionTupleScheme getScheme() {
return new InterpreterCompletionTupleScheme();
}
}
private static class InterpreterCompletionTupleScheme extends TupleScheme<InterpreterCompletion> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, InterpreterCompletion struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
BitSet optionals = new BitSet();
if (struct.isSetName()) {
optionals.set(0);
}
if (struct.isSetValue()) {
optionals.set(1);
}
oprot.writeBitSet(optionals, 2);
if (struct.isSetName()) {
oprot.writeString(struct.name);
}
if (struct.isSetValue()) {
oprot.writeString(struct.value);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, InterpreterCompletion struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
BitSet incoming = iprot.readBitSet(2);
if (incoming.get(0)) {
struct.name = iprot.readString();
struct.setNameIsSet(true);
}
if (incoming.get(1)) {
struct.value = iprot.readString();
struct.setValueIsSet(true);
}
}
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteApplicationResult.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteApplicationResult.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.zeppelin.interpreter.thrift;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2016-11-29")
public class RemoteApplicationResult implements org.apache.thrift.TBase<RemoteApplicationResult, RemoteApplicationResult._Fields>, java.io.Serializable, Cloneable, Comparable<RemoteApplicationResult> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("RemoteApplicationResult");
private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.BOOL, (short)1);
private static final org.apache.thrift.protocol.TField MSG_FIELD_DESC = new org.apache.thrift.protocol.TField("msg", org.apache.thrift.protocol.TType.STRING, (short)2);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new RemoteApplicationResultStandardSchemeFactory());
schemes.put(TupleScheme.class, new RemoteApplicationResultTupleSchemeFactory());
}
public boolean success; // required
public String msg; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
SUCCESS((short)1, "success"),
MSG((short)2, "msg");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // SUCCESS
return SUCCESS;
case 2: // MSG
return MSG;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
private static final int __SUCCESS_ISSET_ID = 0;
private byte __isset_bitfield = 0;
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
tmpMap.put(_Fields.MSG, new org.apache.thrift.meta_data.FieldMetaData("msg", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(RemoteApplicationResult.class, metaDataMap);
}
public RemoteApplicationResult() {
}
public RemoteApplicationResult(
boolean success,
String msg)
{
this();
this.success = success;
setSuccessIsSet(true);
this.msg = msg;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public RemoteApplicationResult(RemoteApplicationResult other) {
__isset_bitfield = other.__isset_bitfield;
this.success = other.success;
if (other.isSetMsg()) {
this.msg = other.msg;
}
}
public RemoteApplicationResult deepCopy() {
return new RemoteApplicationResult(this);
}
@Override
public void clear() {
setSuccessIsSet(false);
this.success = false;
this.msg = null;
}
public boolean isSuccess() {
return this.success;
}
public RemoteApplicationResult setSuccess(boolean success) {
this.success = success;
setSuccessIsSet(true);
return this;
}
public void unsetSuccess() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __SUCCESS_ISSET_ID);
}
/** Returns true if field success is set (has been assigned a value) and false otherwise */
public boolean isSetSuccess() {
return EncodingUtils.testBit(__isset_bitfield, __SUCCESS_ISSET_ID);
}
public void setSuccessIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __SUCCESS_ISSET_ID, value);
}
public String getMsg() {
return this.msg;
}
public RemoteApplicationResult setMsg(String msg) {
this.msg = msg;
return this;
}
public void unsetMsg() {
this.msg = null;
}
/** Returns true if field msg is set (has been assigned a value) and false otherwise */
public boolean isSetMsg() {
return this.msg != null;
}
public void setMsgIsSet(boolean value) {
if (!value) {
this.msg = null;
}
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case SUCCESS:
if (value == null) {
unsetSuccess();
} else {
setSuccess((Boolean)value);
}
break;
case MSG:
if (value == null) {
unsetMsg();
} else {
setMsg((String)value);
}
break;
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
case SUCCESS:
return Boolean.valueOf(isSuccess());
case MSG:
return getMsg();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case SUCCESS:
return isSetSuccess();
case MSG:
return isSetMsg();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof RemoteApplicationResult)
return this.equals((RemoteApplicationResult)that);
return false;
}
public boolean equals(RemoteApplicationResult that) {
if (that == null)
return false;
boolean this_present_success = true;
boolean that_present_success = true;
if (this_present_success || that_present_success) {
if (!(this_present_success && that_present_success))
return false;
if (this.success != that.success)
return false;
}
boolean this_present_msg = true && this.isSetMsg();
boolean that_present_msg = true && that.isSetMsg();
if (this_present_msg || that_present_msg) {
if (!(this_present_msg && that_present_msg))
return false;
if (!this.msg.equals(that.msg))
return false;
}
return true;
}
@Override
public int hashCode() {
List<Object> list = new ArrayList<Object>();
boolean present_success = true;
list.add(present_success);
if (present_success)
list.add(success);
boolean present_msg = true && (isSetMsg());
list.add(present_msg);
if (present_msg)
list.add(msg);
return list.hashCode();
}
@Override
public int compareTo(RemoteApplicationResult other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetSuccess()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetMsg()).compareTo(other.isSetMsg());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetMsg()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.msg, other.msg);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("RemoteApplicationResult(");
boolean first = true;
sb.append("success:");
sb.append(this.success);
first = false;
if (!first) sb.append(", ");
sb.append("msg:");
if (this.msg == null) {
sb.append("null");
} else {
sb.append(this.msg);
}
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
// it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
__isset_bitfield = 0;
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class RemoteApplicationResultStandardSchemeFactory implements SchemeFactory {
public RemoteApplicationResultStandardScheme getScheme() {
return new RemoteApplicationResultStandardScheme();
}
}
private static class RemoteApplicationResultStandardScheme extends StandardScheme<RemoteApplicationResult> {
public void read(org.apache.thrift.protocol.TProtocol iprot, RemoteApplicationResult struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // SUCCESS
if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
struct.success = iprot.readBool();
struct.setSuccessIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // MSG
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.msg = iprot.readString();
struct.setMsgIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, RemoteApplicationResult struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
oprot.writeBool(struct.success);
oprot.writeFieldEnd();
if (struct.msg != null) {
oprot.writeFieldBegin(MSG_FIELD_DESC);
oprot.writeString(struct.msg);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class RemoteApplicationResultTupleSchemeFactory implements SchemeFactory {
public RemoteApplicationResultTupleScheme getScheme() {
return new RemoteApplicationResultTupleScheme();
}
}
private static class RemoteApplicationResultTupleScheme extends TupleScheme<RemoteApplicationResult> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, RemoteApplicationResult struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
BitSet optionals = new BitSet();
if (struct.isSetSuccess()) {
optionals.set(0);
}
if (struct.isSetMsg()) {
optionals.set(1);
}
oprot.writeBitSet(optionals, 2);
if (struct.isSetSuccess()) {
oprot.writeBool(struct.success);
}
if (struct.isSetMsg()) {
oprot.writeString(struct.msg);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, RemoteApplicationResult struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
BitSet incoming = iprot.readBitSet(2);
if (incoming.get(0)) {
struct.success = iprot.readBool();
struct.setSuccessIsSet(true);
}
if (incoming.get(1)) {
struct.msg = iprot.readString();
struct.setMsgIsSet(true);
}
}
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterEventType.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterEventType.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.zeppelin.interpreter.thrift;
import java.util.Map;
import java.util.HashMap;
import org.apache.thrift.TEnum;
public enum RemoteInterpreterEventType implements org.apache.thrift.TEnum {
NO_OP(1),
ANGULAR_OBJECT_ADD(2),
ANGULAR_OBJECT_UPDATE(3),
ANGULAR_OBJECT_REMOVE(4),
RUN_INTERPRETER_CONTEXT_RUNNER(5),
RESOURCE_POOL_GET_ALL(6),
RESOURCE_GET(7),
OUTPUT_APPEND(8),
OUTPUT_UPDATE(9),
OUTPUT_UPDATE_ALL(10),
ANGULAR_REGISTRY_PUSH(11),
APP_STATUS_UPDATE(12),
META_INFOS(13),
REMOTE_ZEPPELIN_SERVER_RESOURCE(14);
private final int value;
private RemoteInterpreterEventType(int value) {
this.value = value;
}
/**
* Get the integer value of this enum value, as defined in the Thrift IDL.
*/
public int getValue() {
return value;
}
/**
* Find a the enum type by its integer value, as defined in the Thrift IDL.
* @return null if the value is not found.
*/
public static RemoteInterpreterEventType findByValue(int value) {
switch (value) {
case 1:
return NO_OP;
case 2:
return ANGULAR_OBJECT_ADD;
case 3:
return ANGULAR_OBJECT_UPDATE;
case 4:
return ANGULAR_OBJECT_REMOVE;
case 5:
return RUN_INTERPRETER_CONTEXT_RUNNER;
case 6:
return RESOURCE_POOL_GET_ALL;
case 7:
return RESOURCE_GET;
case 8:
return OUTPUT_APPEND;
case 9:
return OUTPUT_UPDATE;
case 10:
return OUTPUT_UPDATE_ALL;
case 11:
return ANGULAR_REGISTRY_PUSH;
case 12:
return APP_STATUS_UPDATE;
case 13:
return META_INFOS;
case 14:
return REMOTE_ZEPPELIN_SERVER_RESOURCE;
default:
return null;
}
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/ZeppelinServerResourceParagraphRunner.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/ZeppelinServerResourceParagraphRunner.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.zeppelin.interpreter.thrift;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2016-11-29")
public class ZeppelinServerResourceParagraphRunner implements org.apache.thrift.TBase<ZeppelinServerResourceParagraphRunner, ZeppelinServerResourceParagraphRunner._Fields>, java.io.Serializable, Cloneable, Comparable<ZeppelinServerResourceParagraphRunner> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ZeppelinServerResourceParagraphRunner");
private static final org.apache.thrift.protocol.TField NOTE_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("noteId", org.apache.thrift.protocol.TType.STRING, (short)1);
private static final org.apache.thrift.protocol.TField PARAGRAPH_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("paragraphId", org.apache.thrift.protocol.TType.STRING, (short)2);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new ZeppelinServerResourceParagraphRunnerStandardSchemeFactory());
schemes.put(TupleScheme.class, new ZeppelinServerResourceParagraphRunnerTupleSchemeFactory());
}
public String noteId; // required
public String paragraphId; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
NOTE_ID((short)1, "noteId"),
PARAGRAPH_ID((short)2, "paragraphId");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // NOTE_ID
return NOTE_ID;
case 2: // PARAGRAPH_ID
return PARAGRAPH_ID;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.NOTE_ID, new org.apache.thrift.meta_data.FieldMetaData("noteId", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.PARAGRAPH_ID, new org.apache.thrift.meta_data.FieldMetaData("paragraphId", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ZeppelinServerResourceParagraphRunner.class, metaDataMap);
}
public ZeppelinServerResourceParagraphRunner() {
}
public ZeppelinServerResourceParagraphRunner(
String noteId,
String paragraphId)
{
this();
this.noteId = noteId;
this.paragraphId = paragraphId;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public ZeppelinServerResourceParagraphRunner(ZeppelinServerResourceParagraphRunner other) {
if (other.isSetNoteId()) {
this.noteId = other.noteId;
}
if (other.isSetParagraphId()) {
this.paragraphId = other.paragraphId;
}
}
public ZeppelinServerResourceParagraphRunner deepCopy() {
return new ZeppelinServerResourceParagraphRunner(this);
}
@Override
public void clear() {
this.noteId = null;
this.paragraphId = null;
}
public String getNoteId() {
return this.noteId;
}
public ZeppelinServerResourceParagraphRunner setNoteId(String noteId) {
this.noteId = noteId;
return this;
}
public void unsetNoteId() {
this.noteId = null;
}
/** Returns true if field noteId is set (has been assigned a value) and false otherwise */
public boolean isSetNoteId() {
return this.noteId != null;
}
public void setNoteIdIsSet(boolean value) {
if (!value) {
this.noteId = null;
}
}
public String getParagraphId() {
return this.paragraphId;
}
public ZeppelinServerResourceParagraphRunner setParagraphId(String paragraphId) {
this.paragraphId = paragraphId;
return this;
}
public void unsetParagraphId() {
this.paragraphId = null;
}
/** Returns true if field paragraphId is set (has been assigned a value) and false otherwise */
public boolean isSetParagraphId() {
return this.paragraphId != null;
}
public void setParagraphIdIsSet(boolean value) {
if (!value) {
this.paragraphId = null;
}
}
/** Generic setter used by Thrift's reflective field access; null unsets the field. */
public void setFieldValue(_Fields field, Object value) {
  switch (field) {
  case NOTE_ID:
    if (value == null) {
      unsetNoteId();
    } else {
      setNoteId((String)value);
    }
    break;
  case PARAGRAPH_ID:
    if (value == null) {
      unsetParagraphId();
    } else {
      setParagraphId((String)value);
    }
    break;
  }
}
/** Generic getter counterpart of {@link #setFieldValue}. */
public Object getFieldValue(_Fields field) {
  switch (field) {
  case NOTE_ID:
    return getNoteId();
  case PARAGRAPH_ID:
    return getParagraphId();
  }
  // Unreachable for the known enum constants; guards against unexpected values.
  throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
  if (field == null) {
    throw new IllegalArgumentException();
  }
  switch (field) {
  case NOTE_ID:
    return isSetNoteId();
  case PARAGRAPH_ID:
    return isSetParagraphId();
  }
  throw new IllegalStateException();
}
/**
 * Equality entry point: delegates to the typed overload.
 * {@code instanceof} evaluates to false for null, so the original explicit
 * null check is preserved implicitly.
 */
@Override
public boolean equals(Object that) {
  return (that instanceof ZeppelinServerResourceParagraphRunner)
      && this.equals((ZeppelinServerResourceParagraphRunner) that);
}
/**
 * Field-wise equality: each field must be either unset on both sides, or set
 * on both sides with equal values.
 */
public boolean equals(ZeppelinServerResourceParagraphRunner that) {
  if (that == null) {
    return false;
  }
  boolean thisHasNote = this.isSetNoteId();
  boolean thatHasNote = that.isSetNoteId();
  if (thisHasNote != thatHasNote) {
    return false;
  }
  if (thisHasNote && !this.noteId.equals(that.noteId)) {
    return false;
  }
  boolean thisHasParagraph = this.isSetParagraphId();
  boolean thatHasParagraph = that.isSetParagraphId();
  if (thisHasParagraph != thatHasParagraph) {
    return false;
  }
  if (thisHasParagraph && !this.paragraphId.equals(that.paragraphId)) {
    return false;
  }
  return true;
}
/**
 * Hash code consistent with {@link #equals}: each field contributes a
 * presence marker, and additionally its value when set.
 */
@Override
public int hashCode() {
  List<Object> list = new ArrayList<Object>();
  boolean present_noteId = true && (isSetNoteId());
  list.add(present_noteId);
  if (present_noteId)
    list.add(noteId);
  boolean present_paragraphId = true && (isSetParagraphId());
  list.add(present_paragraphId);
  if (present_paragraphId)
    list.add(paragraphId);
  return list.hashCode();
}
/**
 * Orders by noteId then paragraphId. For each field the presence flags are
 * compared first (Boolean.FALSE < Boolean.TRUE, so unset sorts before set),
 * then the values themselves when both sides have the field set.
 */
@Override
public int compareTo(ZeppelinServerResourceParagraphRunner other) {
  if (!getClass().equals(other.getClass())) {
    return getClass().getName().compareTo(other.getClass().getName());
  }
  int lastComparison = 0;
  lastComparison = Boolean.valueOf(isSetNoteId()).compareTo(other.isSetNoteId());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (isSetNoteId()) {
    lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.noteId, other.noteId);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  lastComparison = Boolean.valueOf(isSetParagraphId()).compareTo(other.isSetParagraphId());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (isSetParagraphId()) {
    lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.paragraphId, other.paragraphId);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  return 0;
}
/** Maps a numeric Thrift field id to its {@code _Fields} constant. */
public _Fields fieldForId(int fieldId) {
  return _Fields.findByThriftId(fieldId);
}
/** Deserializes this struct from the protocol, via the scheme registered for it. */
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
  schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
/** Serializes this struct to the protocol, via the scheme registered for it. */
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
  schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
/**
 * Renders as {@code ZeppelinServerResourceParagraphRunner(noteId:<v>, paragraphId:<v>)},
 * printing the literal "null" for unset fields — identical output to the
 * generated version.
 *
 * <p>The generated code carried a dead {@code first} flag that was set to
 * false before it was ever tested, so the ", " separator was appended
 * unconditionally; the flag is removed here without changing the output.
 */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder("ZeppelinServerResourceParagraphRunner(");
  sb.append("noteId:");
  if (this.noteId == null) {
    sb.append("null");
  } else {
    sb.append(this.noteId);
  }
  sb.append(", ");
  sb.append("paragraphId:");
  if (this.paragraphId == null) {
    sb.append("null");
  } else {
    sb.append(this.paragraphId);
  }
  sb.append(")");
  return sb.toString();
}
/** No required fields and no sub-structs in this struct, so nothing to check. */
public void validate() throws org.apache.thrift.TException {
  // check for required fields
  // check for sub-struct validity
}
/** Java-serialization hook: delegates to Thrift's compact protocol encoding. */
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
  try {
    write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
  } catch (org.apache.thrift.TException te) {
    // Preserve the Thrift failure as the cause of the IOException.
    throw new java.io.IOException(te);
  }
}
/** Java-deserialization hook: delegates to Thrift's compact protocol decoding. */
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
  try {
    read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
  } catch (org.apache.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}
/** Factory registered for the field-tagged (standard) protocol family. */
private static class ZeppelinServerResourceParagraphRunnerStandardSchemeFactory implements SchemeFactory {
  public ZeppelinServerResourceParagraphRunnerStandardScheme getScheme() {
    return new ZeppelinServerResourceParagraphRunnerStandardScheme();
  }
}
/** Field-tagged (standard) wire codec for ZeppelinServerResourceParagraphRunner. */
private static class ZeppelinServerResourceParagraphRunnerStandardScheme extends StandardScheme<ZeppelinServerResourceParagraphRunner> {
  public void read(org.apache.thrift.protocol.TProtocol iprot, ZeppelinServerResourceParagraphRunner struct) throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TField schemeField;
    iprot.readStructBegin();
    // Read fields until the STOP marker; unknown ids and mismatched types are skipped.
    while (true)
    {
      schemeField = iprot.readFieldBegin();
      if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
        break;
      }
      switch (schemeField.id) {
        case 1: // NOTE_ID
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.noteId = iprot.readString();
            struct.setNoteIdIsSet(true);
          } else {
            // Wire type does not match the schema: skip for forward compatibility.
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 2: // PARAGRAPH_ID
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.paragraphId = iprot.readString();
            struct.setParagraphIdIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        default:
          org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
      }
      iprot.readFieldEnd();
    }
    iprot.readStructEnd();
    // check for required fields of primitive type, which can't be checked in the validate method
    struct.validate();
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot, ZeppelinServerResourceParagraphRunner struct) throws org.apache.thrift.TException {
    struct.validate();
    oprot.writeStructBegin(STRUCT_DESC);
    // Only set (non-null) fields are written to the wire.
    if (struct.noteId != null) {
      oprot.writeFieldBegin(NOTE_ID_FIELD_DESC);
      oprot.writeString(struct.noteId);
      oprot.writeFieldEnd();
    }
    if (struct.paragraphId != null) {
      oprot.writeFieldBegin(PARAGRAPH_ID_FIELD_DESC);
      oprot.writeString(struct.paragraphId);
      oprot.writeFieldEnd();
    }
    oprot.writeFieldStop();
    oprot.writeStructEnd();
  }
}
/** Factory registered for the compact tuple protocol family. */
private static class ZeppelinServerResourceParagraphRunnerTupleSchemeFactory implements SchemeFactory {
  public ZeppelinServerResourceParagraphRunnerTupleScheme getScheme() {
    return new ZeppelinServerResourceParagraphRunnerTupleScheme();
  }
}
/**
 * Tuple wire codec: a leading 2-bit presence bitset says which of the two
 * optional string fields follow, then the set values in field order.
 */
private static class ZeppelinServerResourceParagraphRunnerTupleScheme extends TupleScheme<ZeppelinServerResourceParagraphRunner> {
  @Override
  public void write(org.apache.thrift.protocol.TProtocol prot, ZeppelinServerResourceParagraphRunner struct) throws org.apache.thrift.TException {
    TTupleProtocol oprot = (TTupleProtocol) prot;
    BitSet optionals = new BitSet();
    if (struct.isSetNoteId()) {
      optionals.set(0);
    }
    if (struct.isSetParagraphId()) {
      optionals.set(1);
    }
    oprot.writeBitSet(optionals, 2);
    if (struct.isSetNoteId()) {
      oprot.writeString(struct.noteId);
    }
    if (struct.isSetParagraphId()) {
      oprot.writeString(struct.paragraphId);
    }
  }
  @Override
  public void read(org.apache.thrift.protocol.TProtocol prot, ZeppelinServerResourceParagraphRunner struct) throws org.apache.thrift.TException {
    TTupleProtocol iprot = (TTupleProtocol) prot;
    BitSet incoming = iprot.readBitSet(2);
    if (incoming.get(0)) {
      struct.noteId = iprot.readString();
      struct.setNoteIdIsSet(true);
    }
    if (incoming.get(1)) {
      struct.paragraphId = iprot.readString();
      struct.setParagraphIdIsSet(true);
    }
  }
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.zeppelin.interpreter.thrift;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2016-11-29")
public class RemoteInterpreterService {
/**
 * Synchronous RPC contract between the Zeppelin server and a remote
 * interpreter process: interpreter lifecycle, paragraph execution, resource
 * pool access, Angular-object propagation, and Helium application control.
 */
public interface Iface {
  // Interpreter lifecycle and execution.
  public void createInterpreter(String intpGroupId, String sessionKey, String className, Map<String,String> properties, String userName) throws org.apache.thrift.TException;
  public void open(String sessionKey, String className) throws org.apache.thrift.TException;
  public void close(String sessionKey, String className) throws org.apache.thrift.TException;
  public RemoteInterpreterResult interpret(String sessionKey, String className, String st, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException;
  public void cancel(String sessionKey, String className, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException;
  public int getProgress(String sessionKey, String className, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException;
  public String getFormType(String sessionKey, String className) throws org.apache.thrift.TException;
  public List<InterpreterCompletion> completion(String sessionKey, String className, String buf, int cursor) throws org.apache.thrift.TException;
  public void shutdown() throws org.apache.thrift.TException;
  public String getStatus(String sessionKey, String jobId) throws org.apache.thrift.TException;
  // Event polling and resource pool access.
  public RemoteInterpreterEvent getEvent() throws org.apache.thrift.TException;
  public void resourcePoolResponseGetAll(List<String> resources) throws org.apache.thrift.TException;
  public void resourceResponseGet(String resourceId, ByteBuffer object) throws org.apache.thrift.TException;
  public List<String> resourcePoolGetAll() throws org.apache.thrift.TException;
  public ByteBuffer resourceGet(String sessionKey, String paragraphId, String resourceName) throws org.apache.thrift.TException;
  public boolean resourceRemove(String sessionKey, String paragraphId, String resourceName) throws org.apache.thrift.TException;
  // Angular object/registry synchronization.
  public void angularObjectUpdate(String name, String sessionKey, String paragraphId, String object) throws org.apache.thrift.TException;
  public void angularObjectAdd(String name, String sessionKey, String paragraphId, String object) throws org.apache.thrift.TException;
  public void angularObjectRemove(String name, String sessionKey, String paragraphId) throws org.apache.thrift.TException;
  public void angularRegistryPush(String registry) throws org.apache.thrift.TException;
  // Helium application lifecycle.
  public RemoteApplicationResult loadApplication(String applicationInstanceId, String packageInfo, String sessionKey, String paragraphId) throws org.apache.thrift.TException;
  public RemoteApplicationResult unloadApplication(String applicationInstanceId) throws org.apache.thrift.TException;
  public RemoteApplicationResult runApplication(String applicationInstanceId) throws org.apache.thrift.TException;
  public void onReceivedZeppelinResource(String object) throws org.apache.thrift.TException;
}
/**
 * Asynchronous counterpart of {@link Iface}: every method takes a callback
 * instead of returning a value; results and failures are delivered through
 * the supplied {@link org.apache.thrift.async.AsyncMethodCallback}.
 */
public interface AsyncIface {
  public void createInterpreter(String intpGroupId, String sessionKey, String className, Map<String,String> properties, String userName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void open(String sessionKey, String className, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void close(String sessionKey, String className, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void interpret(String sessionKey, String className, String st, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void cancel(String sessionKey, String className, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void getProgress(String sessionKey, String className, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void getFormType(String sessionKey, String className, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void completion(String sessionKey, String className, String buf, int cursor, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void shutdown(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void getStatus(String sessionKey, String jobId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void getEvent(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void resourcePoolResponseGetAll(List<String> resources, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void resourceResponseGet(String resourceId, ByteBuffer object, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void resourcePoolGetAll(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void resourceGet(String sessionKey, String paragraphId, String resourceName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void resourceRemove(String sessionKey, String paragraphId, String resourceName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void angularObjectUpdate(String name, String sessionKey, String paragraphId, String object, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void angularObjectAdd(String name, String sessionKey, String paragraphId, String object, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void angularObjectRemove(String name, String sessionKey, String paragraphId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void angularRegistryPush(String registry, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void loadApplication(String applicationInstanceId, String packageInfo, String sessionKey, String paragraphId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void unloadApplication(String applicationInstanceId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void runApplication(String applicationInstanceId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void onReceivedZeppelinResource(String object, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
}
public static class Client extends org.apache.thrift.TServiceClient implements Iface {
/** Factory so generic Thrift plumbing can instantiate clients from protocols. */
public static class Factory implements org.apache.thrift.TServiceClientFactory<Client> {
  public Factory() {}
  public Client getClient(org.apache.thrift.protocol.TProtocol prot) {
    return new Client(prot);
  }
  public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
    return new Client(iprot, oprot);
  }
}
/** Uses the same protocol for both the input and output directions. */
public Client(org.apache.thrift.protocol.TProtocol prot)
{
  super(prot, prot);
}
/** Uses distinct protocols for the input and output directions. */
public Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
  super(iprot, oprot);
}
/** Synchronous RPC: create an interpreter on the remote process, blocking for the ack. */
public void createInterpreter(String intpGroupId, String sessionKey, String className, Map<String,String> properties, String userName) throws org.apache.thrift.TException
{
  send_createInterpreter(intpGroupId, sessionKey, className, properties, userName);
  recv_createInterpreter();
}
/** Marshals the arguments and writes the createInterpreter request frame. */
public void send_createInterpreter(String intpGroupId, String sessionKey, String className, Map<String,String> properties, String userName) throws org.apache.thrift.TException
{
  createInterpreter_args args = new createInterpreter_args();
  args.setIntpGroupId(intpGroupId);
  args.setSessionKey(sessionKey);
  args.setClassName(className);
  args.setProperties(properties);
  args.setUserName(userName);
  sendBase("createInterpreter", args);
}
/** Reads the (void) createInterpreter response; receiveBase raises any server-side exception. */
public void recv_createInterpreter() throws org.apache.thrift.TException
{
  createInterpreter_result result = new createInterpreter_result();
  receiveBase(result, "createInterpreter");
  return;
}
/** Synchronous RPC: open the named interpreter in a session. */
public void open(String sessionKey, String className) throws org.apache.thrift.TException
{
  send_open(sessionKey, className);
  recv_open();
}
public void send_open(String sessionKey, String className) throws org.apache.thrift.TException
{
  open_args args = new open_args();
  args.setSessionKey(sessionKey);
  args.setClassName(className);
  sendBase("open", args);
}
public void recv_open() throws org.apache.thrift.TException
{
  open_result result = new open_result();
  receiveBase(result, "open");
  return;
}
/** Synchronous RPC: close the named interpreter in a session. */
public void close(String sessionKey, String className) throws org.apache.thrift.TException
{
  send_close(sessionKey, className);
  recv_close();
}
public void send_close(String sessionKey, String className) throws org.apache.thrift.TException
{
  close_args args = new close_args();
  args.setSessionKey(sessionKey);
  args.setClassName(className);
  sendBase("close", args);
}
public void recv_close() throws org.apache.thrift.TException
{
  close_result result = new close_result();
  receiveBase(result, "close");
  return;
}
/** Synchronous RPC: run paragraph text {@code st} and block for its result. */
public RemoteInterpreterResult interpret(String sessionKey, String className, String st, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException
{
  send_interpret(sessionKey, className, st, interpreterContext);
  return recv_interpret();
}
public void send_interpret(String sessionKey, String className, String st, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException
{
  interpret_args args = new interpret_args();
  args.setSessionKey(sessionKey);
  args.setClassName(className);
  args.setSt(st);
  args.setInterpreterContext(interpreterContext);
  sendBase("interpret", args);
}
public RemoteInterpreterResult recv_interpret() throws org.apache.thrift.TException
{
  interpret_result result = new interpret_result();
  receiveBase(result, "interpret");
  if (result.isSetSuccess()) {
    return result.success;
  }
  // A response frame with no success value means the server failed to produce a result.
  throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "interpret failed: unknown result");
}
/** Synchronous RPC: cancel the execution described by the context. */
public void cancel(String sessionKey, String className, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException
{
  send_cancel(sessionKey, className, interpreterContext);
  recv_cancel();
}
public void send_cancel(String sessionKey, String className, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException
{
  cancel_args args = new cancel_args();
  args.setSessionKey(sessionKey);
  args.setClassName(className);
  args.setInterpreterContext(interpreterContext);
  sendBase("cancel", args);
}
public void recv_cancel() throws org.apache.thrift.TException
{
  cancel_result result = new cancel_result();
  receiveBase(result, "cancel");
  return;
}
/** Synchronous RPC: fetch the progress value for the given execution context. */
public int getProgress(String sessionKey, String className, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException
{
  send_getProgress(sessionKey, className, interpreterContext);
  return recv_getProgress();
}
public void send_getProgress(String sessionKey, String className, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException
{
  getProgress_args args = new getProgress_args();
  args.setSessionKey(sessionKey);
  args.setClassName(className);
  args.setInterpreterContext(interpreterContext);
  sendBase("getProgress", args);
}
public int recv_getProgress() throws org.apache.thrift.TException
{
  getProgress_result result = new getProgress_result();
  receiveBase(result, "getProgress");
  if (result.isSetSuccess()) {
    return result.success;
  }
  throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getProgress failed: unknown result");
}
/** Synchronous RPC: fetch the interpreter's form type string. */
public String getFormType(String sessionKey, String className) throws org.apache.thrift.TException
{
  send_getFormType(sessionKey, className);
  return recv_getFormType();
}
public void send_getFormType(String sessionKey, String className) throws org.apache.thrift.TException
{
  getFormType_args args = new getFormType_args();
  args.setSessionKey(sessionKey);
  args.setClassName(className);
  sendBase("getFormType", args);
}
public String recv_getFormType() throws org.apache.thrift.TException
{
  getFormType_result result = new getFormType_result();
  receiveBase(result, "getFormType");
  if (result.isSetSuccess()) {
    return result.success;
  }
  throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getFormType failed: unknown result");
}
/** Synchronous RPC: code-completion candidates for {@code buf} at {@code cursor}. */
public List<InterpreterCompletion> completion(String sessionKey, String className, String buf, int cursor) throws org.apache.thrift.TException
{
  send_completion(sessionKey, className, buf, cursor);
  return recv_completion();
}
public void send_completion(String sessionKey, String className, String buf, int cursor) throws org.apache.thrift.TException
{
  completion_args args = new completion_args();
  args.setSessionKey(sessionKey);
  args.setClassName(className);
  args.setBuf(buf);
  args.setCursor(cursor);
  sendBase("completion", args);
}
public List<InterpreterCompletion> recv_completion() throws org.apache.thrift.TException
{
  completion_result result = new completion_result();
  receiveBase(result, "completion");
  if (result.isSetSuccess()) {
    return result.success;
  }
  throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "completion failed: unknown result");
}
/** Synchronous RPC: ask the remote interpreter process to shut down. */
public void shutdown() throws org.apache.thrift.TException
{
  send_shutdown();
  recv_shutdown();
}
public void send_shutdown() throws org.apache.thrift.TException
{
  shutdown_args args = new shutdown_args();
  sendBase("shutdown", args);
}
public void recv_shutdown() throws org.apache.thrift.TException
{
  shutdown_result result = new shutdown_result();
  receiveBase(result, "shutdown");
  return;
}
/** Synchronous RPC: fetch the status string of a job in a session. */
public String getStatus(String sessionKey, String jobId) throws org.apache.thrift.TException
{
  send_getStatus(sessionKey, jobId);
  return recv_getStatus();
}
public void send_getStatus(String sessionKey, String jobId) throws org.apache.thrift.TException
{
  getStatus_args args = new getStatus_args();
  args.setSessionKey(sessionKey);
  args.setJobId(jobId);
  sendBase("getStatus", args);
}
public String recv_getStatus() throws org.apache.thrift.TException
{
  getStatus_result result = new getStatus_result();
  receiveBase(result, "getStatus");
  if (result.isSetSuccess()) {
    return result.success;
  }
  throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getStatus failed: unknown result");
}
/** Synchronous RPC: poll the remote process for its next event. */
public RemoteInterpreterEvent getEvent() throws org.apache.thrift.TException
{
  send_getEvent();
  return recv_getEvent();
}
public void send_getEvent() throws org.apache.thrift.TException
{
  getEvent_args args = new getEvent_args();
  sendBase("getEvent", args);
}
public RemoteInterpreterEvent recv_getEvent() throws org.apache.thrift.TException
{
  getEvent_result result = new getEvent_result();
  receiveBase(result, "getEvent");
  if (result.isSetSuccess()) {
    return result.success;
  }
  throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getEvent failed: unknown result");
}
/** Synchronous RPC: deliver the full resource listing back to the remote side. */
public void resourcePoolResponseGetAll(List<String> resources) throws org.apache.thrift.TException
{
  send_resourcePoolResponseGetAll(resources);
  recv_resourcePoolResponseGetAll();
}
public void send_resourcePoolResponseGetAll(List<String> resources) throws org.apache.thrift.TException
{
  resourcePoolResponseGetAll_args args = new resourcePoolResponseGetAll_args();
  args.setResources(resources);
  sendBase("resourcePoolResponseGetAll", args);
}
public void recv_resourcePoolResponseGetAll() throws org.apache.thrift.TException
{
  resourcePoolResponseGetAll_result result = new resourcePoolResponseGetAll_result();
  receiveBase(result, "resourcePoolResponseGetAll");
  return;
}
/** Synchronous RPC: deliver a single resource payload back to the remote side. */
public void resourceResponseGet(String resourceId, ByteBuffer object) throws org.apache.thrift.TException
{
  send_resourceResponseGet(resourceId, object);
  recv_resourceResponseGet();
}
public void send_resourceResponseGet(String resourceId, ByteBuffer object) throws org.apache.thrift.TException
{
  resourceResponseGet_args args = new resourceResponseGet_args();
  args.setResourceId(resourceId);
  args.setObject(object);
  sendBase("resourceResponseGet", args);
}
public void recv_resourceResponseGet() throws org.apache.thrift.TException
{
  resourceResponseGet_result result = new resourceResponseGet_result();
  receiveBase(result, "resourceResponseGet");
  return;
}
/** Synchronous RPC: list all resources in the remote resource pool. */
public List<String> resourcePoolGetAll() throws org.apache.thrift.TException
{
  send_resourcePoolGetAll();
  return recv_resourcePoolGetAll();
}
public void send_resourcePoolGetAll() throws org.apache.thrift.TException
{
  resourcePoolGetAll_args args = new resourcePoolGetAll_args();
  sendBase("resourcePoolGetAll", args);
}
public List<String> recv_resourcePoolGetAll() throws org.apache.thrift.TException
{
  resourcePoolGetAll_result result = new resourcePoolGetAll_result();
  receiveBase(result, "resourcePoolGetAll");
  if (result.isSetSuccess()) {
    return result.success;
  }
  throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "resourcePoolGetAll failed: unknown result");
}
/** Synchronous RPC: fetch a named resource's serialized bytes. */
public ByteBuffer resourceGet(String sessionKey, String paragraphId, String resourceName) throws org.apache.thrift.TException
{
  send_resourceGet(sessionKey, paragraphId, resourceName);
  return recv_resourceGet();
}
public void send_resourceGet(String sessionKey, String paragraphId, String resourceName) throws org.apache.thrift.TException
{
  resourceGet_args args = new resourceGet_args();
  args.setSessionKey(sessionKey);
  args.setParagraphId(paragraphId);
  args.setResourceName(resourceName);
  sendBase("resourceGet", args);
}
public ByteBuffer recv_resourceGet() throws org.apache.thrift.TException
{
  resourceGet_result result = new resourceGet_result();
  receiveBase(result, "resourceGet");
  if (result.isSetSuccess()) {
    return result.success;
  }
  throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "resourceGet failed: unknown result");
}
/** Synchronous RPC: remove a named resource; returns the remote boolean result. */
public boolean resourceRemove(String sessionKey, String paragraphId, String resourceName) throws org.apache.thrift.TException
{
  send_resourceRemove(sessionKey, paragraphId, resourceName);
  return recv_resourceRemove();
}
public void send_resourceRemove(String sessionKey, String paragraphId, String resourceName) throws org.apache.thrift.TException
{
  resourceRemove_args args = new resourceRemove_args();
  args.setSessionKey(sessionKey);
  args.setParagraphId(paragraphId);
  args.setResourceName(resourceName);
  sendBase("resourceRemove", args);
}
public boolean recv_resourceRemove() throws org.apache.thrift.TException
{
  resourceRemove_result result = new resourceRemove_result();
  receiveBase(result, "resourceRemove");
  if (result.isSetSuccess()) {
    return result.success;
  }
  throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "resourceRemove failed: unknown result");
}
/** Synchronous RPC: push an updated Angular object value to the remote side. */
public void angularObjectUpdate(String name, String sessionKey, String paragraphId, String object) throws org.apache.thrift.TException
{
  send_angularObjectUpdate(name, sessionKey, paragraphId, object);
  recv_angularObjectUpdate();
}
public void send_angularObjectUpdate(String name, String sessionKey, String paragraphId, String object) throws org.apache.thrift.TException
{
  angularObjectUpdate_args args = new angularObjectUpdate_args();
  args.setName(name);
  args.setSessionKey(sessionKey);
  args.setParagraphId(paragraphId);
  args.setObject(object);
  sendBase("angularObjectUpdate", args);
}
public void recv_angularObjectUpdate() throws org.apache.thrift.TException
{
  angularObjectUpdate_result result = new angularObjectUpdate_result();
  receiveBase(result, "angularObjectUpdate");
  return;
}
/** Synchronous RPC: add a new Angular object on the remote side. */
public void angularObjectAdd(String name, String sessionKey, String paragraphId, String object) throws org.apache.thrift.TException
{
  send_angularObjectAdd(name, sessionKey, paragraphId, object);
  recv_angularObjectAdd();
}
public void send_angularObjectAdd(String name, String sessionKey, String paragraphId, String object) throws org.apache.thrift.TException
{
  angularObjectAdd_args args = new angularObjectAdd_args();
  args.setName(name);
  args.setSessionKey(sessionKey);
  args.setParagraphId(paragraphId);
  args.setObject(object);
  sendBase("angularObjectAdd", args);
}
public void recv_angularObjectAdd() throws org.apache.thrift.TException
{
  angularObjectAdd_result result = new angularObjectAdd_result();
  receiveBase(result, "angularObjectAdd");
  return;
}
/** Synchronous RPC: remove an Angular object on the remote side. */
public void angularObjectRemove(String name, String sessionKey, String paragraphId) throws org.apache.thrift.TException
{
  send_angularObjectRemove(name, sessionKey, paragraphId);
  recv_angularObjectRemove();
}
public void send_angularObjectRemove(String name, String sessionKey, String paragraphId) throws org.apache.thrift.TException
{
  angularObjectRemove_args args = new angularObjectRemove_args();
  args.setName(name);
  args.setSessionKey(sessionKey);
  args.setParagraphId(paragraphId);
  sendBase("angularObjectRemove", args);
}
public void recv_angularObjectRemove() throws org.apache.thrift.TException
{
  angularObjectRemove_result result = new angularObjectRemove_result();
  receiveBase(result, "angularObjectRemove");
  return;
}
/** Synchronous RPC: push a whole serialized Angular registry to the remote side. */
public void angularRegistryPush(String registry) throws org.apache.thrift.TException
{
  send_angularRegistryPush(registry);
  recv_angularRegistryPush();
}
public void send_angularRegistryPush(String registry) throws org.apache.thrift.TException
{
  angularRegistryPush_args args = new angularRegistryPush_args();
  args.setRegistry(registry);
  sendBase("angularRegistryPush", args);
}
public void recv_angularRegistryPush() throws org.apache.thrift.TException
{
  angularRegistryPush_result result = new angularRegistryPush_result();
  receiveBase(result, "angularRegistryPush");
  return;
}
/** Blocking RPC: asks the remote peer to load an application instance and returns its result. */
public RemoteApplicationResult loadApplication(String applicationInstanceId, String packageInfo, String sessionKey, String paragraphId) throws org.apache.thrift.TException
{
  send_loadApplication(applicationInstanceId, packageInfo, sessionKey, paragraphId);
  return recv_loadApplication();
}
/** Serializes the arguments into a loadApplication request and writes it to the transport. */
public void send_loadApplication(String applicationInstanceId, String packageInfo, String sessionKey, String paragraphId) throws org.apache.thrift.TException
{
  loadApplication_args request = new loadApplication_args()
      .setApplicationInstanceId(applicationInstanceId)
      .setPackageInfo(packageInfo)
      .setSessionKey(sessionKey)
      .setParagraphId(paragraphId);
  sendBase("loadApplication", request);
}
/** Reads the loadApplication response; a frame without the success field is reported as MISSING_RESULT. */
public RemoteApplicationResult recv_loadApplication() throws org.apache.thrift.TException
{
  loadApplication_result result = new loadApplication_result();
  receiveBase(result, "loadApplication");
  if (!result.isSetSuccess()) {
    throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "loadApplication failed: unknown result");
  }
  return result.success;
}
/** Blocking RPC: asks the remote peer to unload an application instance and returns its result. */
public RemoteApplicationResult unloadApplication(String applicationInstanceId) throws org.apache.thrift.TException
{
  send_unloadApplication(applicationInstanceId);
  return recv_unloadApplication();
}
/** Serializes the instance id into an unloadApplication request and writes it to the transport. */
public void send_unloadApplication(String applicationInstanceId) throws org.apache.thrift.TException
{
  unloadApplication_args request = new unloadApplication_args()
      .setApplicationInstanceId(applicationInstanceId);
  sendBase("unloadApplication", request);
}
/** Reads the unloadApplication response; a frame without the success field is reported as MISSING_RESULT. */
public RemoteApplicationResult recv_unloadApplication() throws org.apache.thrift.TException
{
  unloadApplication_result result = new unloadApplication_result();
  receiveBase(result, "unloadApplication");
  if (!result.isSetSuccess()) {
    throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "unloadApplication failed: unknown result");
  }
  return result.success;
}
/** Blocking RPC: asks the remote peer to run an application instance and returns its result. */
public RemoteApplicationResult runApplication(String applicationInstanceId) throws org.apache.thrift.TException
{
  send_runApplication(applicationInstanceId);
  return recv_runApplication();
}
/** Serializes the instance id into a runApplication request and writes it to the transport. */
public void send_runApplication(String applicationInstanceId) throws org.apache.thrift.TException
{
  runApplication_args request = new runApplication_args()
      .setApplicationInstanceId(applicationInstanceId);
  sendBase("runApplication", request);
}
/** Reads the runApplication response; a frame without the success field is reported as MISSING_RESULT. */
public RemoteApplicationResult recv_runApplication() throws org.apache.thrift.TException
{
  runApplication_result result = new runApplication_result();
  receiveBase(result, "runApplication");
  if (!result.isSetSuccess()) {
    throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "runApplication failed: unknown result");
  }
  return result.success;
}
/** Blocking RPC: forwards a serialized Zeppelin resource to the remote peer and waits for the (void) ack. */
public void onReceivedZeppelinResource(String object) throws org.apache.thrift.TException
{
  send_onReceivedZeppelinResource(object);
  recv_onReceivedZeppelinResource();
}
/** Serializes the payload into an onReceivedZeppelinResource request and writes it to the transport. */
public void send_onReceivedZeppelinResource(String object) throws org.apache.thrift.TException
{
  onReceivedZeppelinResource_args request = new onReceivedZeppelinResource_args()
      .setObject(object);
  sendBase("onReceivedZeppelinResource", request);
}
/** Drains the (void) onReceivedZeppelinResource response frame. */
public void recv_onReceivedZeppelinResource() throws org.apache.thrift.TException
{
  receiveBase(new onReceivedZeppelinResource_result(), "onReceivedZeppelinResource");
}
}
public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {
/** Creates AsyncClient instances bound to a shared client manager and protocol factory. */
public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
  // Both collaborators are fixed at construction time.
  private final org.apache.thrift.async.TAsyncClientManager clientManager;
  private final org.apache.thrift.protocol.TProtocolFactory protocolFactory;
  public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) {
    this.clientManager = clientManager;
    this.protocolFactory = protocolFactory;
  }
  /** Binds a fresh async client to the given non-blocking transport. */
  public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) {
    return new AsyncClient(protocolFactory, clientManager, transport);
  }
}
/** Builds an async client over the given protocol/manager/transport triple; all state lives in TAsyncClient. */
public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) {
  super(protocolFactory, clientManager, transport);
}
/**
 * Asynchronous createInterpreter call: registers the pending method call with the
 * client manager, which invokes {@code resultHandler} when the response arrives.
 */
public void createInterpreter(String intpGroupId, String sessionKey, String className, Map<String,String> properties, String userName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
  checkReady();  // only one outstanding call per async client is allowed
  createInterpreter_call call =
      new createInterpreter_call(intpGroupId, sessionKey, className, properties, userName, resultHandler, this, ___protocolFactory, ___transport);
  this.___currentMethod = call;
  ___manager.call(call);
}
public static class createInterpreter_call extends org.apache.thrift.async.TAsyncMethodCall {
private String intpGroupId;
private String sessionKey;
private String className;
private Map<String,String> properties;
private String userName;
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | true |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterContext.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterContext.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.zeppelin.interpreter.thrift;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2016-11-29")
public class RemoteInterpreterContext implements org.apache.thrift.TBase<RemoteInterpreterContext, RemoteInterpreterContext._Fields>, java.io.Serializable, Cloneable, Comparable<RemoteInterpreterContext> {
// Thrift wire metadata: the struct descriptor plus one TField descriptor
// (name, wire type, field id) per serialized member. Ids 1-9 must match the IDL.
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("RemoteInterpreterContext");
private static final org.apache.thrift.protocol.TField NOTE_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("noteId", org.apache.thrift.protocol.TType.STRING, (short)1);
private static final org.apache.thrift.protocol.TField PARAGRAPH_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("paragraphId", org.apache.thrift.protocol.TType.STRING, (short)2);
private static final org.apache.thrift.protocol.TField REPL_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("replName", org.apache.thrift.protocol.TType.STRING, (short)3);
private static final org.apache.thrift.protocol.TField PARAGRAPH_TITLE_FIELD_DESC = new org.apache.thrift.protocol.TField("paragraphTitle", org.apache.thrift.protocol.TType.STRING, (short)4);
private static final org.apache.thrift.protocol.TField PARAGRAPH_TEXT_FIELD_DESC = new org.apache.thrift.protocol.TField("paragraphText", org.apache.thrift.protocol.TType.STRING, (short)5);
private static final org.apache.thrift.protocol.TField AUTHENTICATION_INFO_FIELD_DESC = new org.apache.thrift.protocol.TField("authenticationInfo", org.apache.thrift.protocol.TType.STRING, (short)6);
private static final org.apache.thrift.protocol.TField CONFIG_FIELD_DESC = new org.apache.thrift.protocol.TField("config", org.apache.thrift.protocol.TType.STRING, (short)7);
private static final org.apache.thrift.protocol.TField GUI_FIELD_DESC = new org.apache.thrift.protocol.TField("gui", org.apache.thrift.protocol.TType.STRING, (short)8);
private static final org.apache.thrift.protocol.TField RUNNERS_FIELD_DESC = new org.apache.thrift.protocol.TField("runners", org.apache.thrift.protocol.TType.STRING, (short)9);
// Registry of serialization schemes: standard (field-by-field) and tuple encodings.
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
  schemes.put(StandardScheme.class, new RemoteInterpreterContextStandardSchemeFactory());
  schemes.put(TupleScheme.class, new RemoteInterpreterContextTupleSchemeFactory());
}
// Struct members; all STRING-typed on the wire. "required" here reflects the
// generator's comment for default requiredness, not a validate()-enforced constraint.
public String noteId; // required
public String paragraphId; // required
public String replName; // required
public String paragraphTitle; // required
public String paragraphText; // required
public String authenticationInfo; // required
public String config; // required
public String gui; // required
public String runners; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
  NOTE_ID((short)1, "noteId"),
  PARAGRAPH_ID((short)2, "paragraphId"),
  REPL_NAME((short)3, "replName"),
  PARAGRAPH_TITLE((short)4, "paragraphTitle"),
  PARAGRAPH_TEXT((short)5, "paragraphText"),
  AUTHENTICATION_INFO((short)6, "authenticationInfo"),
  CONFIG((short)7, "config"),
  GUI((short)8, "gui"),
  RUNNERS((short)9, "runners");

  // Name-to-constant lookup table, populated once at class-initialization time.
  private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
  static {
    for (_Fields field : EnumSet.allOf(_Fields.class)) {
      byName.put(field.getFieldName(), field);
    }
  }

  /**
   * Find the _Fields constant that matches fieldId, or null if its not found.
   */
  public static _Fields findByThriftId(int fieldId) {
    // Thrift ids are unique, so a linear scan over the constants is equivalent
    // to the generated switch.
    for (_Fields field : values()) {
      if (field._thriftId == fieldId) {
        return field;
      }
    }
    return null;
  }

  /**
   * Find the _Fields constant that matches fieldId, throwing an exception
   * if it is not found.
   */
  public static _Fields findByThriftIdOrThrow(int fieldId) {
    _Fields match = findByThriftId(fieldId);
    if (match == null) {
      throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
    }
    return match;
  }

  /**
   * Find the _Fields constant that matches name, or null if its not found.
   */
  public static _Fields findByName(String name) {
    return byName.get(name);
  }

  private final short _thriftId;
  private final String _fieldName;

  _Fields(short thriftId, String fieldName) {
    this._thriftId = thriftId;
    this._fieldName = fieldName;
  }

  public short getThriftFieldId() {
    return _thriftId;
  }

  public String getFieldName() {
    return _fieldName;
  }
}
// isset id assignments
// Reflection metadata consumed by thrift tooling: one FieldMetaData entry per
// member, all DEFAULT-requirement STRING fields. Registered globally for this class.
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
  Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
  tmpMap.put(_Fields.NOTE_ID, new org.apache.thrift.meta_data.FieldMetaData("noteId", org.apache.thrift.TFieldRequirementType.DEFAULT,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
  tmpMap.put(_Fields.PARAGRAPH_ID, new org.apache.thrift.meta_data.FieldMetaData("paragraphId", org.apache.thrift.TFieldRequirementType.DEFAULT,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
  tmpMap.put(_Fields.REPL_NAME, new org.apache.thrift.meta_data.FieldMetaData("replName", org.apache.thrift.TFieldRequirementType.DEFAULT,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
  tmpMap.put(_Fields.PARAGRAPH_TITLE, new org.apache.thrift.meta_data.FieldMetaData("paragraphTitle", org.apache.thrift.TFieldRequirementType.DEFAULT,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
  tmpMap.put(_Fields.PARAGRAPH_TEXT, new org.apache.thrift.meta_data.FieldMetaData("paragraphText", org.apache.thrift.TFieldRequirementType.DEFAULT,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
  tmpMap.put(_Fields.AUTHENTICATION_INFO, new org.apache.thrift.meta_data.FieldMetaData("authenticationInfo", org.apache.thrift.TFieldRequirementType.DEFAULT,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
  tmpMap.put(_Fields.CONFIG, new org.apache.thrift.meta_data.FieldMetaData("config", org.apache.thrift.TFieldRequirementType.DEFAULT,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
  tmpMap.put(_Fields.GUI, new org.apache.thrift.meta_data.FieldMetaData("gui", org.apache.thrift.TFieldRequirementType.DEFAULT,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
  tmpMap.put(_Fields.RUNNERS, new org.apache.thrift.meta_data.FieldMetaData("runners", org.apache.thrift.TFieldRequirementType.DEFAULT,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
  metaDataMap = Collections.unmodifiableMap(tmpMap);
  org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(RemoteInterpreterContext.class, metaDataMap);
}
/** No-arg constructor; all fields start unset (null), as required by thrift deserialization. */
public RemoteInterpreterContext() {
}
/** All-fields constructor; passing null for a field leaves it unset. */
public RemoteInterpreterContext(
  String noteId,
  String paragraphId,
  String replName,
  String paragraphTitle,
  String paragraphText,
  String authenticationInfo,
  String config,
  String gui,
  String runners)
{
  this();
  this.noteId = noteId;
  this.paragraphId = paragraphId;
  this.replName = replName;
  this.paragraphTitle = paragraphTitle;
  this.paragraphText = paragraphText;
  this.authenticationInfo = authenticationInfo;
  this.config = config;
  this.gui = gui;
  this.runners = runners;
}
/**
 * Performs a deep copy on <i>other</i>. Every member is an immutable String,
 * so reference assignment is sufficient: set fields are shared safely and
 * unset fields stay null, matching the generated per-field isSet checks.
 */
public RemoteInterpreterContext(RemoteInterpreterContext other) {
  this.noteId = other.noteId;
  this.paragraphId = other.paragraphId;
  this.replName = other.replName;
  this.paragraphTitle = other.paragraphTitle;
  this.paragraphText = other.paragraphText;
  this.authenticationInfo = other.authenticationInfo;
  this.config = other.config;
  this.gui = other.gui;
  this.runners = other.runners;
}
/** Returns a field-by-field copy of this struct (TBase contract). */
public RemoteInterpreterContext deepCopy() {
  return new RemoteInterpreterContext(this);
}
/** Resets every field to unset (null), returning the struct to its freshly-constructed state. */
@Override
public void clear() {
  this.noteId = null;
  this.paragraphId = null;
  this.replName = null;
  this.paragraphTitle = null;
  this.paragraphText = null;
  this.authenticationInfo = null;
  this.config = null;
  this.gui = null;
  this.runners = null;
}
// ---- noteId ----------------------------------------------------------------
public String getNoteId() { return noteId; }

/** Fluent setter; returns this for chaining. */
public RemoteInterpreterContext setNoteId(String noteId) {
  this.noteId = noteId;
  return this;
}

public void unsetNoteId() { noteId = null; }

/** Returns true if field noteId is set (has been assigned a value) and false otherwise */
public boolean isSetNoteId() { return noteId != null; }

/** Clears the field when {@code value} is false; "set" is tracked purely by nullness. */
public void setNoteIdIsSet(boolean value) {
  if (!value) noteId = null;
}

// ---- paragraphId -----------------------------------------------------------
public String getParagraphId() { return paragraphId; }

/** Fluent setter; returns this for chaining. */
public RemoteInterpreterContext setParagraphId(String paragraphId) {
  this.paragraphId = paragraphId;
  return this;
}

public void unsetParagraphId() { paragraphId = null; }

/** Returns true if field paragraphId is set (has been assigned a value) and false otherwise */
public boolean isSetParagraphId() { return paragraphId != null; }

public void setParagraphIdIsSet(boolean value) {
  if (!value) paragraphId = null;
}

// ---- replName --------------------------------------------------------------
public String getReplName() { return replName; }

/** Fluent setter; returns this for chaining. */
public RemoteInterpreterContext setReplName(String replName) {
  this.replName = replName;
  return this;
}

public void unsetReplName() { replName = null; }

/** Returns true if field replName is set (has been assigned a value) and false otherwise */
public boolean isSetReplName() { return replName != null; }

public void setReplNameIsSet(boolean value) {
  if (!value) replName = null;
}

// ---- paragraphTitle --------------------------------------------------------
public String getParagraphTitle() { return paragraphTitle; }

/** Fluent setter; returns this for chaining. */
public RemoteInterpreterContext setParagraphTitle(String paragraphTitle) {
  this.paragraphTitle = paragraphTitle;
  return this;
}

public void unsetParagraphTitle() { paragraphTitle = null; }

/** Returns true if field paragraphTitle is set (has been assigned a value) and false otherwise */
public boolean isSetParagraphTitle() { return paragraphTitle != null; }

public void setParagraphTitleIsSet(boolean value) {
  if (!value) paragraphTitle = null;
}

// ---- paragraphText ---------------------------------------------------------
public String getParagraphText() { return paragraphText; }

/** Fluent setter; returns this for chaining. */
public RemoteInterpreterContext setParagraphText(String paragraphText) {
  this.paragraphText = paragraphText;
  return this;
}

public void unsetParagraphText() { paragraphText = null; }

/** Returns true if field paragraphText is set (has been assigned a value) and false otherwise */
public boolean isSetParagraphText() { return paragraphText != null; }

public void setParagraphTextIsSet(boolean value) {
  if (!value) paragraphText = null;
}

// ---- authenticationInfo ----------------------------------------------------
public String getAuthenticationInfo() { return authenticationInfo; }

/** Fluent setter; returns this for chaining. */
public RemoteInterpreterContext setAuthenticationInfo(String authenticationInfo) {
  this.authenticationInfo = authenticationInfo;
  return this;
}

public void unsetAuthenticationInfo() { authenticationInfo = null; }

/** Returns true if field authenticationInfo is set (has been assigned a value) and false otherwise */
public boolean isSetAuthenticationInfo() { return authenticationInfo != null; }

public void setAuthenticationInfoIsSet(boolean value) {
  if (!value) authenticationInfo = null;
}

// ---- config ----------------------------------------------------------------
public String getConfig() { return config; }

/** Fluent setter; returns this for chaining. */
public RemoteInterpreterContext setConfig(String config) {
  this.config = config;
  return this;
}

public void unsetConfig() { config = null; }

/** Returns true if field config is set (has been assigned a value) and false otherwise */
public boolean isSetConfig() { return config != null; }

public void setConfigIsSet(boolean value) {
  if (!value) config = null;
}

// ---- gui -------------------------------------------------------------------
public String getGui() { return gui; }

/** Fluent setter; returns this for chaining. */
public RemoteInterpreterContext setGui(String gui) {
  this.gui = gui;
  return this;
}

public void unsetGui() { gui = null; }

/** Returns true if field gui is set (has been assigned a value) and false otherwise */
public boolean isSetGui() { return gui != null; }

public void setGuiIsSet(boolean value) {
  if (!value) gui = null;
}

// ---- runners ---------------------------------------------------------------
public String getRunners() { return runners; }

/** Fluent setter; returns this for chaining. */
public RemoteInterpreterContext setRunners(String runners) {
  this.runners = runners;
  return this;
}

public void unsetRunners() { runners = null; }

/** Returns true if field runners is set (has been assigned a value) and false otherwise */
public boolean isSetRunners() { return runners != null; }

public void setRunnersIsSet(boolean value) {
  if (!value) runners = null;
}
/** Generic setter used by thrift reflection; null values unset the field. */
public void setFieldValue(_Fields field, Object value) {
  switch (field) {
    case NOTE_ID:
      if (value == null) unsetNoteId(); else setNoteId((String) value);
      break;
    case PARAGRAPH_ID:
      if (value == null) unsetParagraphId(); else setParagraphId((String) value);
      break;
    case REPL_NAME:
      if (value == null) unsetReplName(); else setReplName((String) value);
      break;
    case PARAGRAPH_TITLE:
      if (value == null) unsetParagraphTitle(); else setParagraphTitle((String) value);
      break;
    case PARAGRAPH_TEXT:
      if (value == null) unsetParagraphText(); else setParagraphText((String) value);
      break;
    case AUTHENTICATION_INFO:
      if (value == null) unsetAuthenticationInfo(); else setAuthenticationInfo((String) value);
      break;
    case CONFIG:
      if (value == null) unsetConfig(); else setConfig((String) value);
      break;
    case GUI:
      if (value == null) unsetGui(); else setGui((String) value);
      break;
    case RUNNERS:
      if (value == null) unsetRunners(); else setRunners((String) value);
      break;
  }
}
/** Generic getter used by thrift reflection; unset fields read back as null. */
public Object getFieldValue(_Fields field) {
  switch (field) {
    case NOTE_ID:            return getNoteId();
    case PARAGRAPH_ID:       return getParagraphId();
    case REPL_NAME:          return getReplName();
    case PARAGRAPH_TITLE:    return getParagraphTitle();
    case PARAGRAPH_TEXT:     return getParagraphText();
    case AUTHENTICATION_INFO:return getAuthenticationInfo();
    case CONFIG:             return getConfig();
    case GUI:                return getGui();
    case RUNNERS:            return getRunners();
    default:                 throw new IllegalStateException();
  }
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
  if (field == null) {
    throw new IllegalArgumentException();
  }
  switch (field) {
    case NOTE_ID:            return isSetNoteId();
    case PARAGRAPH_ID:       return isSetParagraphId();
    case REPL_NAME:          return isSetReplName();
    case PARAGRAPH_TITLE:    return isSetParagraphTitle();
    case PARAGRAPH_TEXT:     return isSetParagraphText();
    case AUTHENTICATION_INFO:return isSetAuthenticationInfo();
    case CONFIG:             return isSetConfig();
    case GUI:                return isSetGui();
    case RUNNERS:            return isSetRunners();
    default:                 throw new IllegalStateException();
  }
}
@Override
public boolean equals(Object that) {
  // instanceof is false for null, so the separate null check of the generated
  // version is folded into a single expression.
  return (that instanceof RemoteInterpreterContext) && this.equals((RemoteInterpreterContext) that);
}
/**
 * Typed equality: two contexts are equal when every field pair is either
 * both unset, or both set and String-equal.
 */
public boolean equals(RemoteInterpreterContext that) {
  if (that == null) {
    return false;
  }
  return fieldEq(noteId, that.noteId)
      && fieldEq(paragraphId, that.paragraphId)
      && fieldEq(replName, that.replName)
      && fieldEq(paragraphTitle, that.paragraphTitle)
      && fieldEq(paragraphText, that.paragraphText)
      && fieldEq(authenticationInfo, that.authenticationInfo)
      && fieldEq(config, that.config)
      && fieldEq(gui, that.gui)
      && fieldEq(runners, that.runners);
}

/** Null-safe equality for a single String member ("unset" is represented by null). */
private static boolean fieldEq(String a, String b) {
  return (a == null) ? (b == null) : a.equals(b);
}
@Override
public int hashCode() {
  // Reproduces the generated hash exactly: for each field append its "present"
  // flag, then the value itself when present, and hash the resulting list.
  List<Object> parts = new ArrayList<Object>();
  String[] fields = {noteId, paragraphId, replName, paragraphTitle, paragraphText,
      authenticationInfo, config, gui, runners};
  for (String value : fields) {
    boolean present = (value != null);
    parts.add(present);
    if (present) {
      parts.add(value);
    }
  }
  return parts.hashCode();
}
@Override
public int compareTo(RemoteInterpreterContext other) {
  if (!getClass().equals(other.getClass())) {
    return getClass().getName().compareTo(other.getClass().getName());
  }
  // Field order follows the thrift ids 1..9, matching the generated code.
  String[] mine = {noteId, paragraphId, replName, paragraphTitle, paragraphText,
      authenticationInfo, config, gui, runners};
  String[] theirs = {other.noteId, other.paragraphId, other.replName, other.paragraphTitle,
      other.paragraphText, other.authenticationInfo, other.config, other.gui, other.runners};
  for (int i = 0; i < mine.length; i++) {
    // An unset field sorts before a set one; values are compared only when both are set.
    int cmp = Boolean.valueOf(mine[i] != null).compareTo(Boolean.valueOf(theirs[i] != null));
    if (cmp == 0 && mine[i] != null) {
      cmp = org.apache.thrift.TBaseHelper.compareTo(mine[i], theirs[i]);
    }
    if (cmp != 0) {
      return cmp;
    }
  }
  return 0;
}
/** TBase hook: maps a wire field id to its _Fields constant (null if unknown). */
public _Fields fieldForId(int fieldId) {
  return _Fields.findByThriftId(fieldId);
}
/** Deserializes this struct using the scheme matching the protocol (standard or tuple). */
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
  schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
/** Serializes this struct using the scheme matching the protocol (standard or tuple). */
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
  schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
  // String concatenation renders a null member as the literal "null", which is
  // exactly what the StringBuilder-based generated version produced.
  return "RemoteInterpreterContext("
      + "noteId:" + noteId
      + ", paragraphId:" + paragraphId
      + ", replName:" + replName
      + ", paragraphTitle:" + paragraphTitle
      + ", paragraphText:" + paragraphText
      + ", authenticationInfo:" + authenticationInfo
      + ", config:" + config
      + ", gui:" + gui
      + ", runners:" + runners
      + ")";
}
  /** Generated validation hook; intentionally a no-op since this struct declares no required fields. */
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  // Java serialization hook: encode via Thrift compact protocol instead of default field serialization.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  // Java deserialization hook: decode the compact-protocol bytes written by writeObject above.
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  /** Factory registered in the schemes map; supplies the standard (field-tagged) scheme. */
  private static class RemoteInterpreterContextStandardSchemeFactory implements SchemeFactory {
    public RemoteInterpreterContextStandardScheme getScheme() {
      return new RemoteInterpreterContextStandardScheme();
    }
  }
private static class RemoteInterpreterContextStandardScheme extends StandardScheme<RemoteInterpreterContext> {
public void read(org.apache.thrift.protocol.TProtocol iprot, RemoteInterpreterContext struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // NOTE_ID
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.noteId = iprot.readString();
struct.setNoteIdIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // PARAGRAPH_ID
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.paragraphId = iprot.readString();
struct.setParagraphIdIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 3: // REPL_NAME
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.replName = iprot.readString();
struct.setReplNameIsSet(true);
} else {
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | true |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterResult.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterResult.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.zeppelin.interpreter.thrift;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2016-11-29")
/**
 * Thrift transfer struct for the outcome of a remote interpreter invocation:
 * a result code string, the produced result messages, and serialized
 * config/gui state. Autogenerated by the Thrift compiler — field ids, types,
 * and serialization order define the wire format and must not be changed.
 */
public class RemoteInterpreterResult implements org.apache.thrift.TBase<RemoteInterpreterResult, RemoteInterpreterResult._Fields>, java.io.Serializable, Cloneable, Comparable<RemoteInterpreterResult> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("RemoteInterpreterResult");
  private static final org.apache.thrift.protocol.TField CODE_FIELD_DESC = new org.apache.thrift.protocol.TField("code", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField MSG_FIELD_DESC = new org.apache.thrift.protocol.TField("msg", org.apache.thrift.protocol.TType.LIST, (short)2);
  private static final org.apache.thrift.protocol.TField CONFIG_FIELD_DESC = new org.apache.thrift.protocol.TField("config", org.apache.thrift.protocol.TType.STRING, (short)3);
  private static final org.apache.thrift.protocol.TField GUI_FIELD_DESC = new org.apache.thrift.protocol.TField("gui", org.apache.thrift.protocol.TType.STRING, (short)4);
  // Maps each serialization scheme kind (standard/tuple) to the factory producing this struct's scheme.
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new RemoteInterpreterResultStandardSchemeFactory());
    schemes.put(TupleScheme.class, new RemoteInterpreterResultTupleSchemeFactory());
  }
  // Struct fields; "required" reflects the generated requiredness comments.
  public String code; // required
  public List<RemoteInterpreterResultMessage> msg; // required
  public String config; // required
  public String gui; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    CODE((short)1, "code"),
    MSG((short)2, "msg"),
    CONFIG((short)3, "config"),
    GUI((short)4, "gui");
    // Lookup table from Thrift field name to constant, built once at class load.
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // CODE
          return CODE;
        case 2: // MSG
          return MSG;
        case 3: // CONFIG
          return CONFIG;
        case 4: // GUI
          return GUI;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final String _fieldName;
    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  // Field metadata registered with the Thrift runtime for reflection-style access.
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.CODE, new org.apache.thrift.meta_data.FieldMetaData("code", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.MSG, new org.apache.thrift.meta_data.FieldMetaData("msg", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, RemoteInterpreterResultMessage.class))));
    tmpMap.put(_Fields.CONFIG, new org.apache.thrift.meta_data.FieldMetaData("config", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.GUI, new org.apache.thrift.meta_data.FieldMetaData("gui", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(RemoteInterpreterResult.class, metaDataMap);
  }
  public RemoteInterpreterResult() {
  }
  public RemoteInterpreterResult(
    String code,
    List<RemoteInterpreterResultMessage> msg,
    String config,
    String gui)
  {
    this();
    this.code = code;
    this.msg = msg;
    this.config = config;
    this.gui = gui;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public RemoteInterpreterResult(RemoteInterpreterResult other) {
    if (other.isSetCode()) {
      this.code = other.code;
    }
    if (other.isSetMsg()) {
      // Deep-copy the message list: each element is copied via its own copy constructor.
      List<RemoteInterpreterResultMessage> __this__msg = new ArrayList<RemoteInterpreterResultMessage>(other.msg.size());
      for (RemoteInterpreterResultMessage other_element : other.msg) {
        __this__msg.add(new RemoteInterpreterResultMessage(other_element));
      }
      this.msg = __this__msg;
    }
    if (other.isSetConfig()) {
      this.config = other.config;
    }
    if (other.isSetGui()) {
      this.gui = other.gui;
    }
  }
  public RemoteInterpreterResult deepCopy() {
    return new RemoteInterpreterResult(this);
  }
  @Override
  public void clear() {
    this.code = null;
    this.msg = null;
    this.config = null;
    this.gui = null;
  }
  public String getCode() {
    return this.code;
  }
  public RemoteInterpreterResult setCode(String code) {
    this.code = code;
    return this;
  }
  public void unsetCode() {
    this.code = null;
  }
  /** Returns true if field code is set (has been assigned a value) and false otherwise */
  public boolean isSetCode() {
    return this.code != null;
  }
  public void setCodeIsSet(boolean value) {
    if (!value) {
      this.code = null;
    }
  }
  public int getMsgSize() {
    return (this.msg == null) ? 0 : this.msg.size();
  }
  public java.util.Iterator<RemoteInterpreterResultMessage> getMsgIterator() {
    return (this.msg == null) ? null : this.msg.iterator();
  }
  public void addToMsg(RemoteInterpreterResultMessage elem) {
    // Lazily creates the backing list on first add.
    if (this.msg == null) {
      this.msg = new ArrayList<RemoteInterpreterResultMessage>();
    }
    this.msg.add(elem);
  }
  public List<RemoteInterpreterResultMessage> getMsg() {
    return this.msg;
  }
  public RemoteInterpreterResult setMsg(List<RemoteInterpreterResultMessage> msg) {
    this.msg = msg;
    return this;
  }
  public void unsetMsg() {
    this.msg = null;
  }
  /** Returns true if field msg is set (has been assigned a value) and false otherwise */
  public boolean isSetMsg() {
    return this.msg != null;
  }
  public void setMsgIsSet(boolean value) {
    if (!value) {
      this.msg = null;
    }
  }
  public String getConfig() {
    return this.config;
  }
  public RemoteInterpreterResult setConfig(String config) {
    this.config = config;
    return this;
  }
  public void unsetConfig() {
    this.config = null;
  }
  /** Returns true if field config is set (has been assigned a value) and false otherwise */
  public boolean isSetConfig() {
    return this.config != null;
  }
  public void setConfigIsSet(boolean value) {
    if (!value) {
      this.config = null;
    }
  }
  public String getGui() {
    return this.gui;
  }
  public RemoteInterpreterResult setGui(String gui) {
    this.gui = gui;
    return this;
  }
  public void unsetGui() {
    this.gui = null;
  }
  /** Returns true if field gui is set (has been assigned a value) and false otherwise */
  public boolean isSetGui() {
    return this.gui != null;
  }
  public void setGuiIsSet(boolean value) {
    if (!value) {
      this.gui = null;
    }
  }
  /** Generic setter: assigns (or unsets, when value is null) the given field. */
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case CODE:
      if (value == null) {
        unsetCode();
      } else {
        setCode((String)value);
      }
      break;
    case MSG:
      if (value == null) {
        unsetMsg();
      } else {
        setMsg((List<RemoteInterpreterResultMessage>)value);
      }
      break;
    case CONFIG:
      if (value == null) {
        unsetConfig();
      } else {
        setConfig((String)value);
      }
      break;
    case GUI:
      if (value == null) {
        unsetGui();
      } else {
        setGui((String)value);
      }
      break;
    }
  }
  /** Generic getter for the given field. */
  public Object getFieldValue(_Fields field) {
    switch (field) {
    case CODE:
      return getCode();
    case MSG:
      return getMsg();
    case CONFIG:
      return getConfig();
    case GUI:
      return getGui();
    }
    throw new IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case CODE:
      return isSetCode();
    case MSG:
      return isSetMsg();
    case CONFIG:
      return isSetConfig();
    case GUI:
      return isSetGui();
    }
    throw new IllegalStateException();
  }
  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof RemoteInterpreterResult)
      return this.equals((RemoteInterpreterResult)that);
    return false;
  }
  // Field-by-field equality: two structs are equal when each field's set-ness and value match.
  public boolean equals(RemoteInterpreterResult that) {
    if (that == null)
      return false;
    boolean this_present_code = true && this.isSetCode();
    boolean that_present_code = true && that.isSetCode();
    if (this_present_code || that_present_code) {
      if (!(this_present_code && that_present_code))
        return false;
      if (!this.code.equals(that.code))
        return false;
    }
    boolean this_present_msg = true && this.isSetMsg();
    boolean that_present_msg = true && that.isSetMsg();
    if (this_present_msg || that_present_msg) {
      if (!(this_present_msg && that_present_msg))
        return false;
      if (!this.msg.equals(that.msg))
        return false;
    }
    boolean this_present_config = true && this.isSetConfig();
    boolean that_present_config = true && that.isSetConfig();
    if (this_present_config || that_present_config) {
      if (!(this_present_config && that_present_config))
        return false;
      if (!this.config.equals(that.config))
        return false;
    }
    boolean this_present_gui = true && this.isSetGui();
    boolean that_present_gui = true && that.isSetGui();
    if (this_present_gui || that_present_gui) {
      if (!(this_present_gui && that_present_gui))
        return false;
      if (!this.gui.equals(that.gui))
        return false;
    }
    return true;
  }
  // Hash mixes each field's set-ness flag and (when set) its value, consistent with equals.
  @Override
  public int hashCode() {
    List<Object> list = new ArrayList<Object>();
    boolean present_code = true && (isSetCode());
    list.add(present_code);
    if (present_code)
      list.add(code);
    boolean present_msg = true && (isSetMsg());
    list.add(present_msg);
    if (present_msg)
      list.add(msg);
    boolean present_config = true && (isSetConfig());
    list.add(present_config);
    if (present_config)
      list.add(config);
    boolean present_gui = true && (isSetGui());
    list.add(present_gui);
    if (present_gui)
      list.add(gui);
    return list.hashCode();
  }
  // Orders by field-id sequence: set-ness first, then value, for each field in turn.
  @Override
  public int compareTo(RemoteInterpreterResult other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = Boolean.valueOf(isSetCode()).compareTo(other.isSetCode());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetCode()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.code, other.code);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetMsg()).compareTo(other.isSetMsg());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetMsg()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.msg, other.msg);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetConfig()).compareTo(other.isSetConfig());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetConfig()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.config, other.config);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetGui()).compareTo(other.isSetGui());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetGui()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.gui, other.gui);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  /** Resolves a Thrift wire field id to its {@code _Fields} constant, or null if unknown. */
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  /** Deserializes this struct from {@code iprot} via the scheme matching the protocol. */
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  /** Serializes this struct to {@code oprot} via the scheme matching the protocol. */
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("RemoteInterpreterResult(");
    boolean first = true;
    sb.append("code:");
    if (this.code == null) {
      sb.append("null");
    } else {
      sb.append(this.code);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("msg:");
    if (this.msg == null) {
      sb.append("null");
    } else {
      sb.append(this.msg);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("config:");
    if (this.config == null) {
      sb.append("null");
    } else {
      sb.append(this.config);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("gui:");
    if (this.gui == null) {
      sb.append("null");
    } else {
      sb.append(this.gui);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }
  /** Generated validation hook; intentionally a no-op since this struct declares no required fields. */
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  // Java serialization hook: encode via Thrift compact protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  // Java deserialization hook: decode the compact-protocol bytes written by writeObject.
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  /** Factory for the standard (field-tagged) serialization scheme. */
  private static class RemoteInterpreterResultStandardSchemeFactory implements SchemeFactory {
    public RemoteInterpreterResultStandardScheme getScheme() {
      return new RemoteInterpreterResultStandardScheme();
    }
  }
  /** Standard scheme: self-describing encoding with per-field type/id headers; unknown fields are skipped. */
  private static class RemoteInterpreterResultStandardScheme extends StandardScheme<RemoteInterpreterResult> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, RemoteInterpreterResult struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // CODE
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.code = iprot.readString();
              struct.setCodeIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // MSG
            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
              {
                org.apache.thrift.protocol.TList _list0 = iprot.readListBegin();
                struct.msg = new ArrayList<RemoteInterpreterResultMessage>(_list0.size);
                RemoteInterpreterResultMessage _elem1;
                for (int _i2 = 0; _i2 < _list0.size; ++_i2)
                {
                  _elem1 = new RemoteInterpreterResultMessage();
                  _elem1.read(iprot);
                  struct.msg.add(_elem1);
                }
                iprot.readListEnd();
              }
              struct.setMsgIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 3: // CONFIG
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.config = iprot.readString();
              struct.setConfigIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 4: // GUI
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.gui = iprot.readString();
              struct.setGuiIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, RemoteInterpreterResult struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.code != null) {
        oprot.writeFieldBegin(CODE_FIELD_DESC);
        oprot.writeString(struct.code);
        oprot.writeFieldEnd();
      }
      if (struct.msg != null) {
        oprot.writeFieldBegin(MSG_FIELD_DESC);
        {
          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.msg.size()));
          for (RemoteInterpreterResultMessage _iter3 : struct.msg)
          {
            _iter3.write(oprot);
          }
          oprot.writeListEnd();
        }
        oprot.writeFieldEnd();
      }
      if (struct.config != null) {
        oprot.writeFieldBegin(CONFIG_FIELD_DESC);
        oprot.writeString(struct.config);
        oprot.writeFieldEnd();
      }
      if (struct.gui != null) {
        oprot.writeFieldBegin(GUI_FIELD_DESC);
        oprot.writeString(struct.gui);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  /** Factory for the compact tuple serialization scheme. */
  private static class RemoteInterpreterResultTupleSchemeFactory implements SchemeFactory {
    public RemoteInterpreterResultTupleScheme getScheme() {
      return new RemoteInterpreterResultTupleScheme();
    }
  }
  /** Tuple scheme: a presence bitset followed by the set fields in field-id order (no per-field headers). */
  private static class RemoteInterpreterResultTupleScheme extends TupleScheme<RemoteInterpreterResult> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, RemoteInterpreterResult struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetCode()) {
        optionals.set(0);
      }
      if (struct.isSetMsg()) {
        optionals.set(1);
      }
      if (struct.isSetConfig()) {
        optionals.set(2);
      }
      if (struct.isSetGui()) {
        optionals.set(3);
      }
      oprot.writeBitSet(optionals, 4);
      if (struct.isSetCode()) {
        oprot.writeString(struct.code);
      }
      if (struct.isSetMsg()) {
        {
          oprot.writeI32(struct.msg.size());
          for (RemoteInterpreterResultMessage _iter4 : struct.msg)
          {
            _iter4.write(oprot);
          }
        }
      }
      if (struct.isSetConfig()) {
        oprot.writeString(struct.config);
      }
      if (struct.isSetGui()) {
        oprot.writeString(struct.gui);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, RemoteInterpreterResult struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(4);
      if (incoming.get(0)) {
        struct.code = iprot.readString();
        struct.setCodeIsSet(true);
      }
      if (incoming.get(1)) {
        {
          org.apache.thrift.protocol.TList _list5 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
          struct.msg = new ArrayList<RemoteInterpreterResultMessage>(_list5.size);
          RemoteInterpreterResultMessage _elem6;
          for (int _i7 = 0; _i7 < _list5.size; ++_i7)
          {
            _elem6 = new RemoteInterpreterResultMessage();
            _elem6.read(iprot);
            struct.msg.add(_elem6);
          }
        }
        struct.setMsgIsSet(true);
      }
      if (incoming.get(2)) {
        struct.config = iprot.readString();
        struct.setConfigIsSet(true);
      }
      if (incoming.get(3)) {
        struct.gui = iprot.readString();
        struct.setGuiIsSet(true);
      }
    }
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterEvent.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterEvent.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.zeppelin.interpreter.thrift;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2016-11-29")
public class RemoteInterpreterEvent implements org.apache.thrift.TBase<RemoteInterpreterEvent, RemoteInterpreterEvent._Fields>, java.io.Serializable, Cloneable, Comparable<RemoteInterpreterEvent> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("RemoteInterpreterEvent");
private static final org.apache.thrift.protocol.TField TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("type", org.apache.thrift.protocol.TType.I32, (short)1);
private static final org.apache.thrift.protocol.TField DATA_FIELD_DESC = new org.apache.thrift.protocol.TField("data", org.apache.thrift.protocol.TType.STRING, (short)2);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new RemoteInterpreterEventStandardSchemeFactory());
schemes.put(TupleScheme.class, new RemoteInterpreterEventTupleSchemeFactory());
}
/**
*
* @see RemoteInterpreterEventType
*/
public RemoteInterpreterEventType type; // required
public String data; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    /**
     *
     * @see RemoteInterpreterEventType
     */
    TYPE((short)1, "type"),
    DATA((short)2, "data");
    // Lookup table from Thrift field name to constant, built once at class load.
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // TYPE
          return TYPE;
        case 2: // DATA
          return DATA;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final String _fieldName;
    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  // Field metadata registered with the Thrift runtime for reflection-style access.
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.TYPE, new org.apache.thrift.meta_data.FieldMetaData("type", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, RemoteInterpreterEventType.class)));
    tmpMap.put(_Fields.DATA, new org.apache.thrift.meta_data.FieldMetaData("data", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(RemoteInterpreterEvent.class, metaDataMap);
  }
  /** No-arg constructor required by Thrift deserialization. */
  public RemoteInterpreterEvent() {
  }
  /** All-field convenience constructor. */
  public RemoteInterpreterEvent(
    RemoteInterpreterEventType type,
    String data)
  {
    this();
    this.type = type;
    this.data = data;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public RemoteInterpreterEvent(RemoteInterpreterEvent other) {
    if (other.isSetType()) {
      this.type = other.type;
    }
    if (other.isSetData()) {
      this.data = other.data;
    }
  }
  public RemoteInterpreterEvent deepCopy() {
    return new RemoteInterpreterEvent(this);
  }
  // Resets all fields to the unset (null) state.
  @Override
  public void clear() {
    this.type = null;
    this.data = null;
  }
  /**
   *
   * @see RemoteInterpreterEventType
   */
  public RemoteInterpreterEventType getType() {
    return this.type;
  }
  /**
   *
   * @see RemoteInterpreterEventType
   */
  public RemoteInterpreterEvent setType(RemoteInterpreterEventType type) {
    this.type = type;
    return this;
  }
  public void unsetType() {
    this.type = null;
  }
  /** Returns true if field type is set (has been assigned a value) and false otherwise */
  public boolean isSetType() {
    return this.type != null;
  }
  public void setTypeIsSet(boolean value) {
    if (!value) {
      this.type = null;
    }
  }
  public String getData() {
    return this.data;
  }
  public RemoteInterpreterEvent setData(String data) {
    this.data = data;
    return this;
  }
  public void unsetData() {
    this.data = null;
  }
  /** Returns true if field data is set (has been assigned a value) and false otherwise */
  public boolean isSetData() {
    return this.data != null;
  }
  public void setDataIsSet(boolean value) {
    if (!value) {
      this.data = null;
    }
  }
  /** Generic setter: assigns (or unsets, when value is null) the given field. */
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case TYPE:
      if (value == null) {
        unsetType();
      } else {
        setType((RemoteInterpreterEventType)value);
      }
      break;
    case DATA:
      if (value == null) {
        unsetData();
      } else {
        setData((String)value);
      }
      break;
    }
  }
  /** Generic getter for the given field. */
  public Object getFieldValue(_Fields field) {
    switch (field) {
    case TYPE:
      return getType();
    case DATA:
      return getData();
    }
    throw new IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case TYPE:
      return isSetType();
    case DATA:
      return isSetData();
    }
    throw new IllegalStateException();
  }
  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof RemoteInterpreterEvent)
      return this.equals((RemoteInterpreterEvent)that);
    return false;
  }
  // Field-by-field equality: set-ness and value must match for every field.
  public boolean equals(RemoteInterpreterEvent that) {
    if (that == null)
      return false;
    boolean this_present_type = true && this.isSetType();
    boolean that_present_type = true && that.isSetType();
    if (this_present_type || that_present_type) {
      if (!(this_present_type && that_present_type))
        return false;
      if (!this.type.equals(that.type))
        return false;
    }
    boolean this_present_data = true && this.isSetData();
    boolean that_present_data = true && that.isSetData();
    if (this_present_data || that_present_data) {
      if (!(this_present_data && that_present_data))
        return false;
      if (!this.data.equals(that.data))
        return false;
    }
    return true;
  }
@Override
public int hashCode() {
List<Object> list = new ArrayList<Object>();
boolean present_type = true && (isSetType());
list.add(present_type);
if (present_type)
list.add(type.getValue());
boolean present_data = true && (isSetData());
list.add(present_data);
if (present_data)
list.add(data);
return list.hashCode();
}
@Override
public int compareTo(RemoteInterpreterEvent other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetType()).compareTo(other.isSetType());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetType()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.type, other.type);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetData()).compareTo(other.isSetData());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetData()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.data, other.data);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  // Thrift-generated (de)serialization entry points; the wire format is chosen
  // by the scheme looked up from the protocol instance.
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("RemoteInterpreterEvent(");
    boolean first = true;
    sb.append("type:");
    if (this.type == null) {
      sb.append("null");
    } else {
      sb.append(this.type);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("data:");
    if (this.data == null) {
      sb.append("null");
    } else {
      sb.append(this.data);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  // Java serialization is delegated to Thrift's compact protocol over the object stream.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class RemoteInterpreterEventStandardSchemeFactory implements SchemeFactory {
    public RemoteInterpreterEventStandardScheme getScheme() {
      return new RemoteInterpreterEventStandardScheme();
    }
  }
  // Thrift-generated field-tagged ("standard") wire format: each present field
  // is written with its id/type header; unknown fields are skipped on read.
  private static class RemoteInterpreterEventStandardScheme extends StandardScheme<RemoteInterpreterEvent> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, RemoteInterpreterEvent struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // TYPE
            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
              struct.type = org.apache.zeppelin.interpreter.thrift.RemoteInterpreterEventType.findByValue(iprot.readI32());
              struct.setTypeIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // DATA
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.data = iprot.readString();
              struct.setDataIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, RemoteInterpreterEvent struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.type != null) {
        oprot.writeFieldBegin(TYPE_FIELD_DESC);
        oprot.writeI32(struct.type.getValue());
        oprot.writeFieldEnd();
      }
      if (struct.data != null) {
        oprot.writeFieldBegin(DATA_FIELD_DESC);
        oprot.writeString(struct.data);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class RemoteInterpreterEventTupleSchemeFactory implements SchemeFactory {
    public RemoteInterpreterEventTupleScheme getScheme() {
      return new RemoteInterpreterEventTupleScheme();
    }
  }
  // Thrift-generated compact ("tuple") wire format: a presence BitSet followed
  // by only the values of the fields that are set, in field order.
  private static class RemoteInterpreterEventTupleScheme extends TupleScheme<RemoteInterpreterEvent> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, RemoteInterpreterEvent struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetType()) {
        optionals.set(0);
      }
      if (struct.isSetData()) {
        optionals.set(1);
      }
      oprot.writeBitSet(optionals, 2);
      if (struct.isSetType()) {
        oprot.writeI32(struct.type.getValue());
      }
      if (struct.isSetData()) {
        oprot.writeString(struct.data);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, RemoteInterpreterEvent struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(2);
      if (incoming.get(0)) {
        struct.type = org.apache.zeppelin.interpreter.thrift.RemoteInterpreterEventType.findByValue(iprot.readI32());
        struct.setTypeIsSet(true);
      }
      if (incoming.get(1)) {
        struct.data = iprot.readString();
        struct.setDataIsSet(true);
      }
    }
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteEventClient.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteEventClient.java | package org.apache.zeppelin.interpreter.remote;
import java.util.Map;
/**
 * Adapter around {@code RemoteInterpreterEventClient} that exposes only the
 * {@code RemoteEventClientWrapper} surface of the underlying client.
 */
public class RemoteEventClient implements RemoteEventClientWrapper {

  /** Delegate that performs the actual event communication. */
  private final RemoteInterpreterEventClient client;

  public RemoteEventClient(RemoteInterpreterEventClient client) {
    this.client = client;
  }

  /** Forwards the received meta info map to the wrapped client unchanged. */
  @Override
  public void onMetaInfosReceived(Map<String, String> infos) {
    this.client.onMetaInfosReceived(infos);
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteAngularObjectRegistry.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteAngularObjectRegistry.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.remote;
import java.util.List;
import org.apache.thrift.TException;
import org.apache.zeppelin.display.AngularObject;
import org.apache.zeppelin.display.AngularObjectRegistry;
import org.apache.zeppelin.display.AngularObjectRegistryListener;
import org.apache.zeppelin.interpreter.InterpreterGroup;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.Gson;
/**
* Proxy for AngularObjectRegistry that exists in remote interpreter process
*/
public class RemoteAngularObjectRegistry extends AngularObjectRegistry {
  Logger logger = LoggerFactory.getLogger(RemoteAngularObjectRegistry.class);
  // Gson is stateless for plain toJson calls; share one instance instead of
  // allocating a new one on every add.
  private static final Gson gson = new Gson();
  private InterpreterGroup interpreterGroup;

  public RemoteAngularObjectRegistry(String interpreterId,
                                     AngularObjectRegistryListener listener,
                                     InterpreterGroup interpreterGroup) {
    super(interpreterId, listener);
    this.interpreterGroup = interpreterGroup;
  }

  private RemoteInterpreterProcess getRemoteInterpreterProcess() {
    return interpreterGroup.getRemoteInterpreterProcess();
  }

  /**
   * When ZeppelinServer side code want to add angularObject to the registry,
   * this method should be used instead of add()
   *
   * @param name        angular object name
   * @param o           value to register
   * @param noteId      note scope of the object
   * @param paragraphId paragraph scope of the object
   * @return the locally registered AngularObject, or null when notifying the
   *         remote process failed
   */
  public AngularObject addAndNotifyRemoteProcess(String name, Object o, String noteId, String
      paragraphId) {
    RemoteInterpreterProcess remoteInterpreterProcess = getRemoteInterpreterProcess();
    // Null check added for consistency with removeAndNotifyRemoteProcess:
    // a missing remote process previously caused an NPE on isRunning() instead
    // of a local-only add.
    if (remoteInterpreterProcess == null || !remoteInterpreterProcess.isRunning()) {
      return super.add(name, o, noteId, paragraphId, true);
    }
    Client client = null;
    boolean broken = false;
    try {
      client = remoteInterpreterProcess.getClient();
      client.angularObjectAdd(name, noteId, paragraphId, gson.toJson(o));
      return super.add(name, o, noteId, paragraphId, true);
    } catch (TException e) {
      // transport-level failure: mark the pooled client connection unusable
      broken = true;
      logger.error("Error", e);
    } catch (Exception e) {
      logger.error("Error", e);
    } finally {
      if (client != null) {
        remoteInterpreterProcess.releaseClient(client, broken);
      }
    }
    return null;
  }

  /**
   * When ZeppelinServer side code want to remove angularObject from the registry,
   * this method should be used instead of remove()
   *
   * @param name        angular object name
   * @param noteId      note scope of the object
   * @param paragraphId paragraph scope of the object
   * @return the AngularObject removed locally, or null when notifying the
   *         remote process failed
   */
  public AngularObject removeAndNotifyRemoteProcess(String name, String noteId, String
      paragraphId) {
    RemoteInterpreterProcess remoteInterpreterProcess = getRemoteInterpreterProcess();
    if (remoteInterpreterProcess == null || !remoteInterpreterProcess.isRunning()) {
      return super.remove(name, noteId, paragraphId);
    }
    Client client = null;
    boolean broken = false;
    try {
      client = remoteInterpreterProcess.getClient();
      client.angularObjectRemove(name, noteId, paragraphId);
      return super.remove(name, noteId, paragraphId);
    } catch (TException e) {
      broken = true;
      logger.error("Error", e);
    } catch (Exception e) {
      logger.error("Error", e);
    } finally {
      if (client != null) {
        remoteInterpreterProcess.releaseClient(client, broken);
      }
    }
    return null;
  }

  /** Removes every object registered under the note/paragraph pair, notifying the remote side. */
  public void removeAllAndNotifyRemoteProcess(String noteId, String paragraphId) {
    List<AngularObject> all = getAll(noteId, paragraphId);
    for (AngularObject ao : all) {
      removeAndNotifyRemoteProcess(ao.getName(), noteId, paragraphId);
    }
  }

  @Override
  protected AngularObject createNewAngularObject(String name, Object o, String noteId, String
      paragraphId) {
    // RemoteAngularObject propagates value changes back to the remote process.
    return new RemoteAngularObject(name, o, noteId, paragraphId, interpreterGroup,
        getAngularObjectListener());
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteEventClientWrapper.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteEventClientWrapper.java | package org.apache.zeppelin.interpreter.remote;
import java.util.Map;
/**
 * Restricted wrapper interface over RemoteInterpreterEventClient exposing
 * only the callbacks that consumers are allowed to use.
 */
public interface RemoteEventClientWrapper {
  /** Delivers meta info key/value pairs received from the event client. */
  void onMetaInfosReceived(Map<String, String> infos);
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/InterpreterContextRunnerPool.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/InterpreterContextRunnerPool.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.remote;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.zeppelin.interpreter.InterpreterContextRunner;
import org.apache.zeppelin.interpreter.InterpreterException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Pool of InterpreterContextRunner instances keyed by note id. All access to
 * the backing map is serialized by synchronizing on the map itself.
 */
public class InterpreterContextRunnerPool {
  Logger logger = LoggerFactory.getLogger(InterpreterContextRunnerPool.class);
  private Map<String, List<InterpreterContextRunner>> interpreterContextRunners;

  public InterpreterContextRunnerPool() {
    interpreterContextRunners = new HashMap<>();
  }

  // register a single runner under the given note id
  public void add(String noteId, InterpreterContextRunner runner) {
    synchronized (interpreterContextRunners) {
      List<InterpreterContextRunner> noteRunners = interpreterContextRunners.get(noteId);
      if (noteRunners == null) {
        noteRunners = new LinkedList<InterpreterContextRunner>();
        interpreterContextRunners.put(noteId, noteRunners);
      }
      noteRunners.add(runner);
    }
  }

  // append a batch of runners under the given note id
  public void addAll(String noteId, List<InterpreterContextRunner> runners) {
    synchronized (interpreterContextRunners) {
      List<InterpreterContextRunner> noteRunners = interpreterContextRunners.get(noteId);
      if (noteRunners == null) {
        noteRunners = new LinkedList<InterpreterContextRunner>();
        interpreterContextRunners.put(noteId, noteRunners);
      }
      noteRunners.addAll(runners);
    }
  }

  // forget every runner registered for the note
  public void clear(String noteId) {
    synchronized (interpreterContextRunners) {
      interpreterContextRunners.remove(noteId);
    }
  }

  /**
   * Runs the first registered runner matching the note/paragraph pair.
   *
   * @throws InterpreterException when no matching runner is registered
   */
  public void run(String noteId, String paragraphId) {
    synchronized (interpreterContextRunners) {
      List<InterpreterContextRunner> candidates = interpreterContextRunners.get(noteId);
      if (candidates != null) {
        for (InterpreterContextRunner candidate : candidates) {
          if (noteId.equals(candidate.getNoteId())
              && paragraphId.equals(candidate.getParagraphId())) {
            logger.info("run paragraph {} on note {} from InterpreterContext",
                candidate.getParagraphId(), candidate.getNoteId());
            candidate.run();
            return;
          }
        }
      }
      throw new InterpreterException("Can not run paragraph " + paragraphId + " on " + noteId);
    }
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/AppendOutputBuffer.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/AppendOutputBuffer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.remote;
/**
* This element stores the buffered
* append-data of paragraph's output.
*/
/**
 * Immutable value object holding one buffered chunk of a paragraph's
 * append-only output, identified by note id, paragraph id and output index.
 */
public class AppendOutputBuffer {
  // All fields are final: instances are plain value carriers and were never
  // mutated after construction.
  private final String noteId;
  private final String paragraphId;
  private final int index;
  private final String data;

  /**
   * @param noteId      id of the note the output belongs to
   * @param paragraphId id of the paragraph the output belongs to
   * @param index       output index within the paragraph
   * @param data        the buffered append data
   */
  public AppendOutputBuffer(String noteId, String paragraphId, int index, String data) {
    this.noteId = noteId;
    this.paragraphId = paragraphId;
    this.index = index;
    this.data = data;
  }

  public String getNoteId() {
    return noteId;
  }

  public String getParagraphId() {
    return paragraphId;
  }

  public int getIndex() {
    return index;
  }

  public String getData() {
    return data;
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterManagedProcess.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterManagedProcess.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.remote;
import org.apache.commons.exec.*;
import org.apache.commons.exec.environment.EnvironmentUtils;
import org.apache.zeppelin.helium.ApplicationEventListener;
import org.apache.zeppelin.interpreter.InterpreterException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Map;
/**
* This class manages start / stop of remote interpreter process
*/
/**
 * Manages start / stop of a locally spawned remote interpreter process. The
 * child is launched via the interpreter runner script through commons-exec;
 * the {@link ExecuteResultHandler} callbacks flip the {@code running} flag
 * when the child exits.
 */
public class RemoteInterpreterManagedProcess extends RemoteInterpreterProcess
    implements ExecuteResultHandler {
  private static final Logger logger = LoggerFactory.getLogger(
      RemoteInterpreterManagedProcess.class);
  private final String interpreterRunner;
  private DefaultExecutor executor;
  private ExecuteWatchdog watchdog;
  // volatile: written by the commons-exec callback thread (onProcessComplete /
  // onProcessFailed) and polled by the thread that called start().
  volatile boolean running = false;
  private int port = -1;
  private final String interpreterDir;
  private final String localRepoDir;
  private Map<String, String> env;

  public RemoteInterpreterManagedProcess(
      String intpRunner,
      String intpDir,
      String localRepoDir,
      Map<String, String> env,
      int connectTimeout,
      RemoteInterpreterProcessListener listener,
      ApplicationEventListener appListener) {
    super(new RemoteInterpreterEventPoller(listener, appListener),
        connectTimeout);
    this.interpreterRunner = intpRunner;
    this.env = env;
    this.interpreterDir = intpDir;
    this.localRepoDir = localRepoDir;
  }

  RemoteInterpreterManagedProcess(String intpRunner,
                                  String intpDir,
                                  String localRepoDir,
                                  Map<String, String> env,
                                  RemoteInterpreterEventPoller remoteInterpreterEventPoller,
                                  int connectTimeout) {
    super(remoteInterpreterEventPoller,
        connectTimeout);
    this.interpreterRunner = intpRunner;
    this.env = env;
    this.interpreterDir = intpDir;
    this.localRepoDir = localRepoDir;
  }

  @Override
  public String getHost() {
    // the managed process is always spawned on the local machine
    return "localhost";
  }

  @Override
  public int getPort() {
    return port;
  }

  /**
   * Spawns the interpreter process on a free local port and waits (up to the
   * connect timeout) for its endpoint to become reachable.
   *
   * @param userName          user to impersonate when impersonation is enabled
   * @param isUserImpersonate whether to pass the user to the runner via -u
   * @throws InterpreterException when no port is available, the process cannot
   *         be launched, or it dies during startup
   */
  @Override
  public void start(String userName, Boolean isUserImpersonate) {
    // pick a free port for the remote interpreter's server
    try {
      port = RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces();
    } catch (IOException e1) {
      throw new InterpreterException(e1);
    }
    CommandLine cmdLine = CommandLine.parse(interpreterRunner);
    cmdLine.addArgument("-d", false);
    cmdLine.addArgument(interpreterDir, false);
    cmdLine.addArgument("-p", false);
    cmdLine.addArgument(Integer.toString(port), false);
    // Boolean.TRUE.equals guards against a null isUserImpersonate (auto-unboxing
    // previously threw an NPE here); equals order also tolerates a null userName.
    if (Boolean.TRUE.equals(isUserImpersonate) && !"anonymous".equals(userName)) {
      cmdLine.addArgument("-u", false);
      cmdLine.addArgument(userName, false);
    }
    cmdLine.addArgument("-l", false);
    cmdLine.addArgument(localRepoDir, false);
    executor = new DefaultExecutor();
    ByteArrayOutputStream cmdOut = new ByteArrayOutputStream();
    ProcessLogOutputStream processOutput = new ProcessLogOutputStream(logger);
    processOutput.setOutputStream(cmdOut);
    executor.setStreamHandler(new PumpStreamHandler(processOutput));
    watchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
    executor.setWatchdog(watchdog);
    try {
      // parameterized map replaces the raw Map of the original
      Map<String, String> procEnv = EnvironmentUtils.getProcEnvironment();
      procEnv.putAll(env);
      logger.info("Run interpreter process {}", cmdLine);
      executor.execute(cmdLine, procEnv, this);
      running = true;
    } catch (IOException e) {
      running = false;
      throw new InterpreterException(e);
    }
    // Poll until the remote endpoint accepts connections or the timeout
    // elapses. If the process died meanwhile, surface its captured output
    // as the exception message.
    long startTime = System.currentTimeMillis();
    while (System.currentTimeMillis() - startTime < getConnectTimeout()) {
      if (!running) {
        try {
          cmdOut.flush();
        } catch (IOException e) {
          // nothing to do
        }
        throw new InterpreterException(new String(cmdOut.toByteArray()));
      }
      try {
        if (RemoteInterpreterUtils.checkIfRemoteEndpointAccessible("localhost", port)) {
          break;
        } else {
          try {
            Thread.sleep(500);
          } catch (InterruptedException e) {
            logger.error("Exception in RemoteInterpreterProcess while synchronized reference " +
                "Thread.sleep", e);
          }
        }
      } catch (Exception e) {
        if (logger.isDebugEnabled()) {
          logger.debug("Remote interpreter not yet accessible at localhost:" + port);
        }
      }
    }
    // stop mirroring process output into the capture buffer once startup is done
    processOutput.setOutputStream(null);
  }

  /** Destroys the child process (if still alive) and releases executor state. */
  public void stop() {
    if (isRunning()) {
      logger.info("kill interpreter process");
      watchdog.destroyProcess();
    }
    executor = null;
    watchdog = null;
    running = false;
    logger.info("Remote process terminated");
  }

  @Override
  public void onProcessComplete(int exitValue) {
    logger.info("Interpreter process exited {}", exitValue);
    running = false;
  }

  @Override
  public void onProcessFailed(ExecuteException e) {
    // pass the exception as a Throwable so the stack trace is logged;
    // the original "{}" placeholder consumed it and logged only toString().
    logger.info("Interpreter process failed", e);
    running = false;
  }

  public boolean isRunning() {
    return running;
  }

  /**
   * Forwards child-process output lines to the logger and, while startup is in
   * progress, mirrors the raw bytes into an additional capture stream.
   */
  private static class ProcessLogOutputStream extends LogOutputStream {
    private Logger logger;
    // guarded by synchronized(this); detached (set to null) once startup completes
    OutputStream out;

    public ProcessLogOutputStream(Logger logger) {
      this.logger = logger;
    }

    @Override
    protected void processLine(String s, int i) {
      this.logger.debug(s);
    }

    @Override
    public void write(byte [] b) throws IOException {
      super.write(b);
      // fully synchronized: the original's unsynchronized double-checked read
      // of the non-volatile 'out' field was a data race
      synchronized (this) {
        if (out != null) {
          out.write(b);
        }
      }
    }

    @Override
    public void write(byte [] b, int offset, int len) throws IOException {
      super.write(b, offset, len);
      synchronized (this) {
        if (out != null) {
          out.write(b, offset, len);
        }
      }
    }

    public void setOutputStream(OutputStream out) {
      synchronized (this) {
        this.out = out;
      }
    }
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterUtils.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.remote;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.ConnectException;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
/**
 * Small networking helpers for the remote interpreter machinery: free-port
 * discovery and TCP reachability probing.
 */
public class RemoteInterpreterUtils {
  static Logger LOGGER = LoggerFactory.getLogger(RemoteInterpreterUtils.class);

  /**
   * Finds a TCP port that was free at the moment of the call by binding an
   * ephemeral-port server socket and returning its port number.
   *
   * Note: the port is released before returning, so another process may grab
   * it before the caller binds it (inherent race, unchanged from the original).
   *
   * @return a port number that was available when probed
   * @throws IOException if no socket could be opened
   */
  public static int findRandomAvailablePortOnAllLocalInterfaces() throws IOException {
    // try-with-resources closes the socket; the explicit close() call inside
    // the original try-with-resources was redundant and has been removed.
    try (ServerSocket socket = new ServerSocket(0)) {
      return socket.getLocalPort();
    }
  }

  /**
   * Probes whether a TCP endpoint accepts connections within one second.
   *
   * @param host host name or address to probe
   * @param port TCP port to probe
   * @return true if a connection could be established, false otherwise
   */
  public static boolean checkIfRemoteEndpointAccessible(String host, int port) {
    // try-with-resources guarantees the probe socket is closed even when
    // connect() throws; the original leaked the socket on connection failure.
    try (Socket discover = new Socket()) {
      discover.setSoTimeout(1000);
      discover.connect(new InetSocketAddress(host, port), 1000);
      return true;
    } catch (IOException ioe) {
      // ConnectException is an IOException, so this single handler replaces
      // the two byte-identical catch blocks of the original.
      if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("Remote endpoint '" + host + ":" + port + "' is not accessible " +
            "(might be initializing): " + ioe.getMessage());
      }
      return false;
    }
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterContextRunner.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterContextRunner.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.remote;
import org.apache.zeppelin.interpreter.InterpreterContextRunner;
import org.apache.zeppelin.interpreter.InterpreterException;
/**
 * Concrete InterpreterContextRunner that exists only as a gson deserialization
 * target for the abstract base class; its run() must never be invoked.
 */
public class RemoteInterpreterContextRunner extends InterpreterContextRunner {
  public RemoteInterpreterContextRunner(String noteId, String paragraphId) {
    super(noteId, paragraphId);
  }
  @Override
  public void run() {
    // this class should be used only for gson deserialize abstract class
    // code should not reach here
    throw new InterpreterException("Assert");
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterEventPoller.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterEventPoller.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.remote;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import org.apache.thrift.TException;
import org.apache.zeppelin.display.AngularObject;
import org.apache.zeppelin.display.AngularObjectRegistry;
import org.apache.zeppelin.helium.ApplicationEventListener;
import org.apache.zeppelin.interpreter.InterpreterContextRunner;
import org.apache.zeppelin.interpreter.InterpreterGroup;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.RemoteZeppelinServerResource;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterEvent;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterEventType;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
import org.apache.zeppelin.interpreter.thrift.ZeppelinServerResourceParagraphRunner;
import org.apache.zeppelin.resource.Resource;
import org.apache.zeppelin.resource.ResourceId;
import org.apache.zeppelin.resource.ResourcePool;
import org.apache.zeppelin.resource.ResourceSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.ByteBuffer;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
/**
* Processes message from RemoteInterpreter process
*/
public class RemoteInterpreterEventPoller extends Thread {
  private static final Logger logger = LoggerFactory.getLogger(RemoteInterpreterEventPoller.class);
  // Single-threaded scheduler that periodically flushes buffered append-output
  // (an AppendOutputRunner task is scheduled on it in run()).
  private final ScheduledExecutorService appendService =
      Executors.newSingleThreadScheduledExecutor();
  private final RemoteInterpreterProcessListener listener;
  private final ApplicationEventListener appListener;
  // volatile: the poll loop in run() reads this while another thread flips it to stop polling
  private volatile boolean shutdown;
  // set via setters after construction; presumably before the thread is
  // started, since run() dereferences both — TODO confirm with callers
  private RemoteInterpreterProcess interpreterProcess;
  private InterpreterGroup interpreterGroup;
  public RemoteInterpreterEventPoller(
      RemoteInterpreterProcessListener listener,
      ApplicationEventListener appListener) {
    this.listener = listener;
    this.appListener = appListener;
    shutdown = false;
  }
  public void setInterpreterProcess(RemoteInterpreterProcess interpreterProcess) {
    this.interpreterProcess = interpreterProcess;
  }
  public void setInterpreterGroup(InterpreterGroup interpreterGroup) {
    this.interpreterGroup = interpreterGroup;
  }
@Override
public void run() {
Client client = null;
AppendOutputRunner runner = new AppendOutputRunner(listener);
ScheduledFuture<?> appendFuture = appendService.scheduleWithFixedDelay(
runner, 0, AppendOutputRunner.BUFFER_TIME_MS, TimeUnit.MILLISECONDS);
while (!shutdown) {
// wait and retry
if (!interpreterProcess.isRunning()) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
// nothing to do
}
continue;
}
try {
client = interpreterProcess.getClient();
} catch (Exception e1) {
logger.error("Can't get RemoteInterpreterEvent", e1);
waitQuietly();
continue;
}
RemoteInterpreterEvent event = null;
boolean broken = false;
try {
event = client.getEvent();
} catch (TException e) {
broken = true;
logger.error("Can't get RemoteInterpreterEvent", e);
waitQuietly();
continue;
} finally {
interpreterProcess.releaseClient(client, broken);
}
Gson gson = new Gson();
AngularObjectRegistry angularObjectRegistry = interpreterGroup.getAngularObjectRegistry();
try {
if (event.getType() == RemoteInterpreterEventType.NO_OP) {
continue;
} else if (event.getType() == RemoteInterpreterEventType.ANGULAR_OBJECT_ADD) {
AngularObject angularObject = gson.fromJson(event.getData(), AngularObject.class);
angularObjectRegistry.add(angularObject.getName(),
angularObject.get(), angularObject.getNoteId(), angularObject.getParagraphId());
} else if (event.getType() == RemoteInterpreterEventType.ANGULAR_OBJECT_UPDATE) {
AngularObject angularObject = gson.fromJson(event.getData(),
AngularObject.class);
AngularObject localAngularObject = angularObjectRegistry.get(
angularObject.getName(), angularObject.getNoteId(), angularObject.getParagraphId());
if (localAngularObject instanceof RemoteAngularObject) {
// to avoid ping-pong loop
((RemoteAngularObject) localAngularObject).set(
angularObject.get(), true, false);
} else {
localAngularObject.set(angularObject.get());
}
} else if (event.getType() == RemoteInterpreterEventType.ANGULAR_OBJECT_REMOVE) {
AngularObject angularObject = gson.fromJson(event.getData(), AngularObject.class);
angularObjectRegistry.remove(angularObject.getName(), angularObject.getNoteId(),
angularObject.getParagraphId());
} else if (event.getType() == RemoteInterpreterEventType.RUN_INTERPRETER_CONTEXT_RUNNER) {
InterpreterContextRunner runnerFromRemote = gson.fromJson(
event.getData(), RemoteInterpreterContextRunner.class);
listener.onRemoteRunParagraph(
runnerFromRemote.getNoteId(), runnerFromRemote.getParagraphId());
} else if (event.getType() == RemoteInterpreterEventType.RESOURCE_POOL_GET_ALL) {
ResourceSet resourceSet = getAllResourcePoolExcept();
sendResourcePoolResponseGetAll(resourceSet);
} else if (event.getType() == RemoteInterpreterEventType.RESOURCE_GET) {
String resourceIdString = event.getData();
ResourceId resourceId = gson.fromJson(resourceIdString, ResourceId.class);
logger.debug("RESOURCE_GET {} {}", resourceId.getResourcePoolId(), resourceId.getName());
Object o = getResource(resourceId);
sendResourceResponseGet(resourceId, o);
} else if (event.getType() == RemoteInterpreterEventType.OUTPUT_APPEND) {
// on output append
Map<String, String> outputAppend = gson.fromJson(
event.getData(), new TypeToken<Map<String, Object>>() {}.getType());
String noteId = (String) outputAppend.get("noteId");
String paragraphId = (String) outputAppend.get("paragraphId");
int index = Integer.parseInt(outputAppend.get("index"));
String outputToAppend = (String) outputAppend.get("data");
String appId = (String) outputAppend.get("appId");
if (appId == null) {
runner.appendBuffer(noteId, paragraphId, index, outputToAppend);
} else {
appListener.onOutputAppend(noteId, paragraphId, index, appId, outputToAppend);
}
} else if (event.getType() == RemoteInterpreterEventType.OUTPUT_UPDATE_ALL) {
Map<String, Object> outputUpdate = gson.fromJson(
event.getData(), new TypeToken<Map<String, Object>>() {}.getType());
String noteId = (String) outputUpdate.get("noteId");
String paragraphId = (String) outputUpdate.get("paragraphId");
// clear the output
listener.onOutputClear(noteId, paragraphId);
List<Map<String, String>> messages =
(List<Map<String, String>>) outputUpdate.get("messages");
if (messages != null) {
for (int i = 0; i < messages.size(); i++) {
Map<String, String> m = messages.get(i);
InterpreterResult.Type type =
InterpreterResult.Type.valueOf((String) m.get("type"));
String outputToUpdate = (String) m.get("data");
listener.onOutputUpdated(noteId, paragraphId, i, type, outputToUpdate);
}
}
} else if (event.getType() == RemoteInterpreterEventType.OUTPUT_UPDATE) {
// on output update
Map<String, String> outputAppend = gson.fromJson(
event.getData(), new TypeToken<Map<String, Object>>() {}.getType());
String noteId = (String) outputAppend.get("noteId");
String paragraphId = (String) outputAppend.get("paragraphId");
int index = Integer.parseInt(outputAppend.get("index"));
InterpreterResult.Type type =
InterpreterResult.Type.valueOf((String) outputAppend.get("type"));
String outputToUpdate = (String) outputAppend.get("data");
String appId = (String) outputAppend.get("appId");
if (appId == null) {
listener.onOutputUpdated(noteId, paragraphId, index, type, outputToUpdate);
} else {
appListener.onOutputUpdated(noteId, paragraphId, index, appId, type, outputToUpdate);
}
} else if (event.getType() == RemoteInterpreterEventType.APP_STATUS_UPDATE) {
// on output update
Map<String, String> appStatusUpdate = gson.fromJson(
event.getData(), new TypeToken<Map<String, String>>() {}.getType());
String noteId = appStatusUpdate.get("noteId");
String paragraphId = appStatusUpdate.get("paragraphId");
String appId = appStatusUpdate.get("appId");
String status = appStatusUpdate.get("status");
appListener.onStatusChange(noteId, paragraphId, appId, status);
} else if (event.getType() == RemoteInterpreterEventType.REMOTE_ZEPPELIN_SERVER_RESOURCE) {
RemoteZeppelinServerResource reqResourceBody = gson.fromJson(
event.getData(), RemoteZeppelinServerResource.class);
progressRemoteZeppelinControlEvent(
reqResourceBody.getResourceType(), listener, reqResourceBody);
} else if (event.getType() == RemoteInterpreterEventType.META_INFOS) {
Map<String, String> metaInfos = gson.fromJson(event.getData(),
new TypeToken<Map<String, String>>() {
}.getType());
String id = interpreterGroup.getId();
int indexOfColon = id.indexOf(":");
String settingId = id.substring(0, indexOfColon);
listener.onMetaInfosReceived(settingId, metaInfos);
}
logger.debug("Event from remote process {}", event.getType());
} catch (Exception e) {
logger.error("Can't handle event " + event, e);
}
}
if (appendFuture != null) {
appendFuture.cancel(true);
}
}
private void progressRemoteZeppelinControlEvent(
RemoteZeppelinServerResource.Type resourceType,
RemoteInterpreterProcessListener remoteWorksEventListener,
RemoteZeppelinServerResource reqResourceBody) throws Exception {
boolean broken = false;
final Gson gson = new Gson();
final String eventOwnerKey = reqResourceBody.getOwnerKey();
Client interpreterServerMain = null;
try {
interpreterServerMain = interpreterProcess.getClient();
final Client eventClient = interpreterServerMain;
if (resourceType == RemoteZeppelinServerResource.Type.PARAGRAPH_RUNNERS) {
final List<ZeppelinServerResourceParagraphRunner> remoteRunners = new LinkedList<>();
ZeppelinServerResourceParagraphRunner reqRunnerContext =
new ZeppelinServerResourceParagraphRunner();
Map<String, Object> reqResourceMap = (Map<String, Object>) reqResourceBody.getData();
String noteId = (String) reqResourceMap.get("noteId");
String paragraphId = (String) reqResourceMap.get("paragraphId");
reqRunnerContext.setNoteId(noteId);
reqRunnerContext.setParagraphId(paragraphId);
RemoteInterpreterProcessListener.RemoteWorksEventListener callBackEvent =
new RemoteInterpreterProcessListener.RemoteWorksEventListener() {
@Override
public void onFinished(Object resultObject) {
boolean clientBroken = false;
if (resultObject != null && resultObject instanceof List) {
List<InterpreterContextRunner> runnerList =
(List<InterpreterContextRunner>) resultObject;
for (InterpreterContextRunner r : runnerList) {
remoteRunners.add(
new ZeppelinServerResourceParagraphRunner(r.getNoteId(), r.getParagraphId())
);
}
final RemoteZeppelinServerResource resResource =
new RemoteZeppelinServerResource();
resResource.setOwnerKey(eventOwnerKey);
resResource.setResourceType(RemoteZeppelinServerResource.Type.PARAGRAPH_RUNNERS);
resResource.setData(remoteRunners);
try {
eventClient.onReceivedZeppelinResource(gson.toJson(resResource));
} catch (Exception e) {
clientBroken = true;
logger.error("Can't get RemoteInterpreterEvent", e);
waitQuietly();
} finally {
interpreterProcess.releaseClient(eventClient, clientBroken);
}
}
}
@Override
public void onError() {
logger.info("onGetParagraphRunners onError");
}
};
remoteWorksEventListener.onGetParagraphRunners(
reqRunnerContext.getNoteId(), reqRunnerContext.getParagraphId(), callBackEvent);
}
} catch (Exception e) {
broken = true;
logger.error("Can't get RemoteInterpreterEvent", e);
waitQuietly();
} finally {
interpreterProcess.releaseClient(interpreterServerMain, broken);
}
}
private void sendResourcePoolResponseGetAll(ResourceSet resourceSet) {
Client client = null;
boolean broken = false;
try {
client = interpreterProcess.getClient();
List<String> resourceList = new LinkedList<>();
Gson gson = new Gson();
for (Resource r : resourceSet) {
resourceList.add(gson.toJson(r));
}
client.resourcePoolResponseGetAll(resourceList);
} catch (Exception e) {
logger.error(e.getMessage(), e);
broken = true;
} finally {
if (client != null) {
interpreterProcess.releaseClient(client, broken);
}
}
}
private ResourceSet getAllResourcePoolExcept() {
ResourceSet resourceSet = new ResourceSet();
for (InterpreterGroup intpGroup : InterpreterGroup.getAll()) {
if (intpGroup.getId().equals(interpreterGroup.getId())) {
continue;
}
RemoteInterpreterProcess remoteInterpreterProcess = intpGroup.getRemoteInterpreterProcess();
if (remoteInterpreterProcess == null) {
ResourcePool localPool = intpGroup.getResourcePool();
if (localPool != null) {
resourceSet.addAll(localPool.getAll());
}
} else if (interpreterProcess.isRunning()) {
Client client = null;
boolean broken = false;
try {
client = remoteInterpreterProcess.getClient();
List<String> resourceList = client.resourcePoolGetAll();
Gson gson = new Gson();
for (String res : resourceList) {
resourceSet.add(gson.fromJson(res, Resource.class));
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
broken = true;
} finally {
if (client != null) {
intpGroup.getRemoteInterpreterProcess().releaseClient(client, broken);
}
}
}
}
return resourceSet;
}
private void sendResourceResponseGet(ResourceId resourceId, Object o) {
Client client = null;
boolean broken = false;
try {
client = interpreterProcess.getClient();
Gson gson = new Gson();
String rid = gson.toJson(resourceId);
ByteBuffer obj;
if (o == null) {
obj = ByteBuffer.allocate(0);
} else {
obj = Resource.serializeObject(o);
}
client.resourceResponseGet(rid, obj);
} catch (Exception e) {
logger.error(e.getMessage(), e);
broken = true;
} finally {
if (client != null) {
interpreterProcess.releaseClient(client, broken);
}
}
}
private Object getResource(ResourceId resourceId) {
InterpreterGroup intpGroup = InterpreterGroup.getByInterpreterGroupId(
resourceId.getResourcePoolId());
if (intpGroup == null) {
return null;
}
RemoteInterpreterProcess remoteInterpreterProcess = intpGroup.getRemoteInterpreterProcess();
if (remoteInterpreterProcess == null) {
ResourcePool localPool = intpGroup.getResourcePool();
if (localPool != null) {
return localPool.get(resourceId.getName());
}
} else if (interpreterProcess.isRunning()) {
Client client = null;
boolean broken = false;
try {
client = remoteInterpreterProcess.getClient();
ByteBuffer res = client.resourceGet(
resourceId.getNoteId(),
resourceId.getParagraphId(),
resourceId.getName());
Object o = Resource.deserializeObject(res);
return o;
} catch (Exception e) {
logger.error(e.getMessage(), e);
broken = true;
} finally {
if (client != null) {
intpGroup.getRemoteInterpreterProcess().releaseClient(client, broken);
}
}
}
return null;
}
private void waitQuietly() {
try {
synchronized (this) {
wait(1000);
}
} catch (InterruptedException ignored) {
logger.info("Error in RemoteInterpreterEventPoller while waitQuietly : ", ignored);
}
}
public void shutdown() {
shutdown = true;
synchronized (this) {
notify();
}
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServer.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.remote;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.net.URL;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.thrift.TException;
import org.apache.thrift.server.TThreadPoolServer;
import org.apache.thrift.transport.TServerSocket;
import org.apache.thrift.transport.TTransportException;
import org.apache.zeppelin.dep.DependencyResolver;
import org.apache.zeppelin.display.*;
import org.apache.zeppelin.helium.*;
import org.apache.zeppelin.interpreter.*;
import org.apache.zeppelin.interpreter.InterpreterHookRegistry.HookType;
import org.apache.zeppelin.interpreter.InterpreterHookListener;
import org.apache.zeppelin.interpreter.InterpreterResult.Code;
import org.apache.zeppelin.interpreter.thrift.*;
import org.apache.zeppelin.resource.*;
import org.apache.zeppelin.scheduler.Job;
import org.apache.zeppelin.scheduler.Job.Status;
import org.apache.zeppelin.scheduler.JobListener;
import org.apache.zeppelin.scheduler.JobProgressPoller;
import org.apache.zeppelin.scheduler.Scheduler;
import org.apache.zeppelin.user.AuthenticationInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
/**
* Entry point for Interpreter process.
* Accepting thrift connections from ZeppelinServer.
*/
public class RemoteInterpreterServer
extends Thread
implements RemoteInterpreterService.Iface, AngularObjectRegistryListener {
Logger logger = LoggerFactory.getLogger(RemoteInterpreterServer.class);
InterpreterGroup interpreterGroup;
AngularObjectRegistry angularObjectRegistry;
InterpreterHookRegistry hookRegistry;
DistributedResourcePool resourcePool;
private ApplicationLoader appLoader;
Gson gson = new Gson();
RemoteInterpreterService.Processor<RemoteInterpreterServer> processor;
private int port;
private TThreadPoolServer server;
RemoteInterpreterEventClient eventClient = new RemoteInterpreterEventClient();
private DependencyResolver depLoader;
private final Map<String, RunningApplication> runningApplications =
Collections.synchronizedMap(new HashMap<String, RunningApplication>());
private Map<String, Object> remoteWorksResponsePool;
private ZeppelinRemoteWorksController remoteWorksController;
private final long DEFAULT_SHUTDOWN_TIMEOUT = 2000;
  /**
   * Create the thrift server (not yet serving) bound to the given port.
   *
   * @param port TCP port for the RemoteInterpreterService thrift server
   * @throws TTransportException when the server socket cannot be opened
   */
  public RemoteInterpreterServer(int port) throws TTransportException {
    this.port = port;
    processor = new RemoteInterpreterService.Processor<>(this);
    TServerSocket serverTransport = new TServerSocket(port);
    server = new TThreadPoolServer(
        new TThreadPoolServer.Args(serverTransport).processor(processor));
    // shared with ZeppelinRemoteWorksController; guarded by its own monitor
    remoteWorksResponsePool = Collections.synchronizedMap(new HashMap<String, Object>());
    remoteWorksController = new ZeppelinRemoteWorksController(this, remoteWorksResponsePool);
  }
  /** Thread entry point: serve thrift requests until {@link #shutdown()} stops the server. */
  @Override
  public void run() {
    logger.info("Starting remote interpreter server on port {}", port);
    server.serve();
  }
  /**
   * Graceful shutdown: drain pending events, close the interpreter group, stop
   * the thrift server, and force-exit the JVM if the serve loop does not stop
   * within DEFAULT_SHUTDOWN_TIMEOUT.
   */
  @Override
  public void shutdown() throws TException {
    eventClient.waitForEventQueueBecomesEmpty(DEFAULT_SHUTDOWN_TIMEOUT);
    if (interpreterGroup != null) {
      interpreterGroup.close();
    }
    server.stop();
    // server.stop() does not always finish server.serve() loop
    // sometimes server.serve() is hanging even after server.stop() call.
    // this case, need to force kill the process
    long startTime = System.currentTimeMillis();
    while (System.currentTimeMillis() - startTime < DEFAULT_SHUTDOWN_TIMEOUT &&
        server.isServing()) {
      try {
        Thread.sleep(300);
      } catch (InterruptedException e) {
        logger.info("Exception in RemoteInterpreterServer while shutdown, Thread.sleep", e);
      }
    }
    if (server.isServing()) {
      System.exit(0);
    }
  }
  /** @return the port the thrift server was configured with */
  public int getPort() {
    return port;
  }
public boolean isRunning() {
if (server == null) {
return false;
} else {
return server.isServing();
}
}
  /**
   * Process entry point.
   *
   * @param args optional single argument: thrift port (defaults to
   *             Constants.ZEPPELIN_INTERPRETER_DEFAUlT_PORT)
   */
  public static void main(String[] args)
      throws TTransportException, InterruptedException {
    int port = Constants.ZEPPELIN_INTERPRETER_DEFAUlT_PORT;
    if (args.length > 0) {
      port = Integer.parseInt(args[0]);
    }
    RemoteInterpreterServer remoteInterpreterServer = new RemoteInterpreterServer(port);
    remoteInterpreterServer.start();
    remoteInterpreterServer.join();
    System.exit(0);
  }
@Override
public void createInterpreter(String interpreterGroupId, String sessionKey, String
className, Map<String, String> properties, String userName) throws TException {
if (interpreterGroup == null) {
interpreterGroup = new InterpreterGroup(interpreterGroupId);
angularObjectRegistry = new AngularObjectRegistry(interpreterGroup.getId(), this);
hookRegistry = new InterpreterHookRegistry(interpreterGroup.getId());
resourcePool = new DistributedResourcePool(interpreterGroup.getId(), eventClient);
interpreterGroup.setInterpreterHookRegistry(hookRegistry);
interpreterGroup.setAngularObjectRegistry(angularObjectRegistry);
interpreterGroup.setResourcePool(resourcePool);
String localRepoPath = properties.get("zeppelin.interpreter.localRepo");
if (properties.containsKey("zeppelin.interpreter.output.limit")) {
InterpreterOutput.limit = Integer.parseInt(
properties.get("zeppelin.interpreter.output.limit"));
}
depLoader = new DependencyResolver(localRepoPath);
appLoader = new ApplicationLoader(resourcePool, depLoader);
}
try {
Class<Interpreter> replClass = (Class<Interpreter>) Object.class.forName(className);
Properties p = new Properties();
p.putAll(properties);
setSystemProperty(p);
Constructor<Interpreter> constructor =
replClass.getConstructor(new Class[] {Properties.class});
Interpreter repl = constructor.newInstance(p);
repl.setClassloaderUrls(new URL[]{});
synchronized (interpreterGroup) {
List<Interpreter> interpreters = interpreterGroup.get(sessionKey);
if (interpreters == null) {
interpreters = new LinkedList<>();
interpreterGroup.put(sessionKey, interpreters);
}
interpreters.add(new LazyOpenInterpreter(repl));
}
logger.info("Instantiate interpreter {}", className);
repl.setInterpreterGroup(interpreterGroup);
repl.setUserName(userName);
} catch (ClassNotFoundException | NoSuchMethodException | SecurityException
| InstantiationException | IllegalAccessException
| IllegalArgumentException | InvocationTargetException e) {
logger.error(e.toString(), e);
throw new TException(e);
}
}
  /** @return the interpreter group hosted by this process (null before createInterpreter). */
  protected InterpreterGroup getInterpreterGroup() {
    return interpreterGroup;
  }
  /** @return the distributed resource pool (null before createInterpreter). */
  protected ResourcePool getResourcePool() {
    return resourcePool;
  }
  /** @return the client used to push events back to the Zeppelin server. */
  protected RemoteInterpreterEventClient getEventClient() {
    return eventClient;
  }
private void setSystemProperty(Properties properties) {
for (Object key : properties.keySet()) {
if (!RemoteInterpreter.isEnvString((String) key)) {
String value = properties.getProperty((String) key);
if (value == null || value.isEmpty()) {
System.clearProperty((String) key);
} else {
System.setProperty((String) key, properties.getProperty((String) key));
}
}
}
}
  /**
   * Look up the interpreter instance of {@code className} registered under
   * {@code sessionKey}.
   *
   * @throws TException when the group, session, or instance does not exist
   */
  protected Interpreter getInterpreter(String sessionKey, String className) throws TException {
    if (interpreterGroup == null) {
      throw new TException(
          new InterpreterException("Interpreter instance " + className + " not created"));
    }
    synchronized (interpreterGroup) {
      List<Interpreter> interpreters = interpreterGroup.get(sessionKey);
      if (interpreters == null) {
        throw new TException(
            new InterpreterException("Interpreter " + className + " not initialized"));
      }
      for (Interpreter inp : interpreters) {
        if (inp.getClassName().equals(className)) {
          return inp;
        }
      }
    }
    throw new TException(new InterpreterException("Interpreter instance "
        + className + " not found"));
  }
  /** Explicitly open the interpreter (normally opened lazily on first interpret). */
  @Override
  public void open(String noteId, String className) throws TException {
    Interpreter intp = getInterpreter(noteId, className);
    intp.open();
  }
@Override
public void close(String sessionKey, String className) throws TException {
// unload all applications
for (String appId : runningApplications.keySet()) {
RunningApplication appInfo = runningApplications.get(appId);
// see NoteInterpreterLoader.SHARED_SESSION
if (appInfo.noteId.equals(sessionKey) || sessionKey.equals("shared_session")) {
try {
logger.info("Unload App {} ", appInfo.pkg.getName());
appInfo.app.unload();
// see ApplicationState.Status.UNLOADED
eventClient.onAppStatusUpdate(appInfo.noteId, appInfo.paragraphId, appId, "UNLOADED");
} catch (ApplicationException e) {
logger.error(e.getMessage(), e);
}
}
}
// close interpreters
List<Interpreter> interpreters;
synchronized (interpreterGroup) {
interpreters = interpreterGroup.get(sessionKey);
}
if (interpreters != null) {
Iterator<Interpreter> it = interpreters.iterator();
while (it.hasNext()) {
Interpreter inp = it.next();
if (inp.getClassName().equals(className)) {
inp.close();
it.remove();
break;
}
}
}
}
  /**
   * Run {@code st} on the target interpreter through its scheduler and block
   * until the job terminates, then convert the result for thrift transport.
   */
  @Override
  public RemoteInterpreterResult interpret(String noteId, String className, String st,
      RemoteInterpreterContext interpreterContext) throws TException {
    if (logger.isDebugEnabled()) {
      logger.debug("st:\n{}", st);
    }
    Interpreter intp = getInterpreter(noteId, className);
    InterpreterContext context = convert(interpreterContext);
    context.setClassName(intp.getClassName());
    Scheduler scheduler = intp.getScheduler();
    InterpretJobListener jobListener = new InterpretJobListener();
    InterpretJob job = new InterpretJob(
        interpreterContext.getParagraphId(),
        "remoteInterpretJob_" + System.currentTimeMillis(),
        jobListener,
        JobProgressPoller.DEFAULT_INTERVAL_MSEC,
        intp,
        st,
        context);
    scheduler.submit(job);
    // Block until terminated; the listener notifies its monitor on every
    // status change, and the 1s timeout guards against a missed notification.
    while (!job.isTerminated()) {
      synchronized (jobListener) {
        try {
          jobListener.wait(1000);
        } catch (InterruptedException e) {
          logger.info("Exception in RemoteInterpreterServer while interpret, jobListener.wait", e);
        }
      }
    }
    InterpreterResult result;
    if (job.getStatus() == Status.ERROR) {
      result = new InterpreterResult(Code.ERROR, Job.getStack(job.getException()));
    } else {
      result = (InterpreterResult) job.getReturn();
      // in case of job abort in PENDING status, result can be null
      if (result == null) {
        result = new InterpreterResult(Code.KEEP_PREVIOUS_RESULT);
      }
    }
    return convert(result,
        context.getConfig(),
        context.getGui());
  }
@Override
public void onReceivedZeppelinResource(String responseJson) throws TException {
RemoteZeppelinServerResource response = gson.fromJson(
responseJson, RemoteZeppelinServerResource.class);
if (response == null) {
throw new TException("Bad response for remote resource");
}
try {
if (response.getResourceType() == RemoteZeppelinServerResource.Type.PARAGRAPH_RUNNERS) {
List<InterpreterContextRunner> intpContextRunners = new LinkedList<>();
List<Map<String, Object>> remoteRunnersMap =
(List<Map<String, Object>>) response.getData();
String noteId = null;
String paragraphId = null;
for (Map<String, Object> runnerItem : remoteRunnersMap) {
noteId = (String) runnerItem.get("noteId");
paragraphId = (String) runnerItem.get("paragraphId");
intpContextRunners.add(
new ParagraphRunner(this, noteId, paragraphId)
);
}
synchronized (this.remoteWorksResponsePool) {
this.remoteWorksResponsePool.put(
response.getOwnerKey(),
intpContextRunners);
}
}
} catch (Exception e) {
throw e;
}
}
  /**
   * JobListener whose only job is to wake the thread blocked in interpret()
   * whenever the job's status changes.
   */
  class InterpretJobListener implements JobListener {
    @Override
    public void onProgressUpdate(Job job, int progress) {
    }
    @Override
    public void beforeStatusChange(Job job, Status before, Status after) {
    }
    @Override
    public void afterStatusChange(Job job, Status before, Status after) {
      synchronized (this) {
        notifyAll();
      }
    }
  }
  /**
   * Scheduler job wrapping one interpret() call: opens the interpreter if
   * needed, applies pre/post execution hooks to the script, runs it and merges
   * streamed output with the returned InterpreterResult.
   */
  class InterpretJob extends Job {
    private Interpreter interpreter;
    // script text; mutated in place by processInterpreterHooks()
    private String script;
    private InterpreterContext context;
    private Map<String, Object> infos;
    private Object results;
    public InterpretJob(
        String jobId,
        String jobName,
        JobListener listener,
        long progressUpdateIntervalMsec,
        Interpreter interpreter,
        String script,
        InterpreterContext context) {
      super(jobId, jobName, listener, progressUpdateIntervalMsec);
      this.interpreter = interpreter;
      this.script = script;
      this.context = context;
    }
    @Override
    public Object getReturn() {
      return results;
    }
    @Override
    public int progress() {
      return 0;
    }
    @Override
    public Map<String, Object> info() {
      // lazily created; never null for callers
      if (infos == null) {
        infos = new HashMap<>();
      }
      return infos;
    }
    // Wrap this.script with the hooks registered for the given note scope
    // (null noteId = global scope): pre-hooks are prepended, post-hooks appended.
    private void processInterpreterHooks(final String noteId) {
      InterpreterHookListener hookListener = new InterpreterHookListener() {
        @Override
        public void onPreExecute(String script) {
          String cmdDev = interpreter.getHook(noteId, HookType.PRE_EXEC_DEV);
          String cmdUser = interpreter.getHook(noteId, HookType.PRE_EXEC);
          // User defined hook should be executed before dev hook
          List<String> cmds = Arrays.asList(cmdDev, cmdUser);
          for (String cmd : cmds) {
            if (cmd != null) {
              script = cmd + '\n' + script;
            }
          }
          InterpretJob.this.script = script;
        }
        @Override
        public void onPostExecute(String script) {
          String cmdDev = interpreter.getHook(noteId, HookType.POST_EXEC_DEV);
          String cmdUser = interpreter.getHook(noteId, HookType.POST_EXEC);
          // User defined hook should be executed after dev hook
          List<String> cmds = Arrays.asList(cmdUser, cmdDev);
          for (String cmd : cmds) {
            if (cmd != null) {
              script += '\n' + cmd;
            }
          }
          InterpretJob.this.script = script;
        }
      };
      hookListener.onPreExecute(script);
      hookListener.onPostExecute(script);
    }
    @Override
    protected Object jobRun() throws Throwable {
      try {
        InterpreterContext.set(context);
        // Open the interpreter instance prior to calling interpret().
        // This is necessary because the earliest we can register a hook
        // is from within the open() method.
        LazyOpenInterpreter lazy = (LazyOpenInterpreter) interpreter;
        if (!lazy.isOpen()) {
          lazy.open();
        }
        // Add hooks to script from registry.
        // Global scope first, followed by notebook scope
        processInterpreterHooks(null);
        processInterpreterHooks(context.getNoteId());
        InterpreterResult result = interpreter.interpret(script, context);
        // data from context.out is prepended to InterpreterResult if both defined
        context.out.flush();
        List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
        resultMessages.addAll(result.message());
        // put result into resource pool
        if (resultMessages.size() > 0) {
          int lastMessageIndex = resultMessages.size() - 1;
          if (resultMessages.get(lastMessageIndex).getType() ==
              InterpreterResult.Type.TABLE) {
            context.getResourcePool().put(
                context.getNoteId(),
                context.getParagraphId(),
                WellKnownResourceName.ZeppelinTableResult.toString(),
                resultMessages.get(lastMessageIndex));
          }
        }
        return new InterpreterResult(result.code(), resultMessages);
      } finally {
        InterpreterContext.remove();
      }
    }
    @Override
    protected boolean jobAbort() {
      return false;
    }
    @Override
    public void setResult(Object results) {
      this.results = results;
    }
  }
  /**
   * Cancel a paragraph: a job still waiting in the scheduler queue is aborted
   * directly, otherwise cancellation is delegated to the interpreter.
   */
  @Override
  public void cancel(String noteId, String className, RemoteInterpreterContext interpreterContext)
      throws TException {
    logger.info("cancel {} {}", className, interpreterContext.getParagraphId());
    Interpreter intp = getInterpreter(noteId, className);
    String jobId = interpreterContext.getParagraphId();
    Job job = intp.getScheduler().removeFromWaitingQueue(jobId);
    if (job != null) {
      job.setStatus(Status.ABORT);
    } else {
      intp.cancel(convert(interpreterContext, null));
    }
  }
  /** @return the interpreter-reported progress (0-100) for the paragraph. */
  @Override
  public int getProgress(String noteId, String className,
                         RemoteInterpreterContext interpreterContext)
      throws TException {
    Interpreter intp = getInterpreter(noteId, className);
    return intp.getProgress(convert(interpreterContext, null));
  }
  /** @return the interpreter's form type name (e.g. "native", "simple"). */
  @Override
  public String getFormType(String noteId, String className) throws TException {
    Interpreter intp = getInterpreter(noteId, className);
    return intp.getFormType().toString();
  }
@Override
public List<InterpreterCompletion> completion(String noteId,
String className, String buf, int cursor)
throws TException {
Interpreter intp = getInterpreter(noteId, className);
List completion = intp.completion(buf, cursor);
return completion;
}
  /** Convert a thrift context, wiring a fresh InterpreterOutput that streams back to the server. */
  private InterpreterContext convert(RemoteInterpreterContext ric) {
    return convert(ric, createInterpreterOutput(ric.getNoteId(), ric.getParagraphId()));
  }
  /**
   * Convert a thrift RemoteInterpreterContext into a local InterpreterContext,
   * rebuilding the paragraph runners so run() calls route back to the Zeppelin
   * server through the event client.
   */
  private InterpreterContext convert(RemoteInterpreterContext ric, InterpreterOutput output) {
    List<InterpreterContextRunner> contextRunners = new LinkedList<>();
    List<InterpreterContextRunner> runners = gson.fromJson(ric.getRunners(),
        new TypeToken<List<RemoteInterpreterContextRunner>>() {
        }.getType());
    for (InterpreterContextRunner r : runners) {
      contextRunners.add(new ParagraphRunner(this, r.getNoteId(), r.getParagraphId()));
    }
    return new InterpreterContext(
        ric.getNoteId(),
        ric.getParagraphId(),
        ric.getReplName(),
        ric.getParagraphTitle(),
        ric.getParagraphText(),
        gson.fromJson(ric.getAuthenticationInfo(), AuthenticationInfo.class),
        (Map<String, Object>) gson.fromJson(ric.getConfig(),
            new TypeToken<Map<String, Object>>() {}.getType()),
        gson.fromJson(ric.getGui(), GUI.class),
        interpreterGroup.getAngularObjectRegistry(),
        interpreterGroup.getResourcePool(),
        contextRunners, output, remoteWorksController, eventClient);
  }
  /**
   * Build an InterpreterOutput whose listener forwards append/update events for
   * the given paragraph to the Zeppelin server via the event client.
   */
  protected InterpreterOutput createInterpreterOutput(final String noteId, final String
      paragraphId) {
    return new InterpreterOutput(new InterpreterOutputListener() {
      @Override
      public void onUpdateAll(InterpreterOutput out) {
        try {
          eventClient.onInterpreterOutputUpdateAll(
              noteId, paragraphId, out.toInterpreterResultMessage());
        } catch (IOException e) {
          logger.error(e.getMessage(), e);
        }
      }
      @Override
      public void onAppend(int index, InterpreterResultMessageOutput out, byte[] line) {
        // NOTE(review): new String(line) uses the platform default charset —
        // confirm both processes agree on the encoding.
        String output = new String(line);
        logger.debug("Output Append: {}", output);
        eventClient.onInterpreterOutputAppend(
            noteId, paragraphId, index, output);
      }
      @Override
      public void onUpdate(int index, InterpreterResultMessageOutput out) {
        String output;
        try {
          output = new String(out.toByteArray());
          logger.debug("Output Update: {}", output);
          eventClient.onInterpreterOutputUpdate(
              noteId, paragraphId, index, out.getType(), output);
        } catch (IOException e) {
          logger.error(e.getMessage(), e);
        }
      }
    });
  }
  /**
   * InterpreterContextRunner whose run() asks the Zeppelin server (via the
   * event client) to execute the referenced paragraph.
   */
  static class ParagraphRunner extends InterpreterContextRunner {
    Logger logger = LoggerFactory.getLogger(ParagraphRunner.class);
    // transient: runners are gson-serialized; the server reference must not be
    private transient RemoteInterpreterServer server;
    public ParagraphRunner(RemoteInterpreterServer server, String noteId, String paragraphId) {
      super(noteId, paragraphId);
      this.server = server;
    }
    @Override
    public void run() {
      server.eventClient.run(this);
    }
  }
/**
 * Bridges "remote works" requests (e.g. fetching a note's paragraph runners)
 * to the Zeppelin server: sends a request keyed by a generated owner key and
 * polls a shared response pool until the answer arrives or a timeout expires.
 */
static class ZeppelinRemoteWorksController implements RemoteWorksController {
  Logger logger = LoggerFactory.getLogger(ZeppelinRemoteWorksController.class);
  private final long DEFAULT_TIMEOUT_VALUE = 300000;
  // Shared with the event-handling side, which deposits responses keyed by owner key.
  private final Map<String, Object> remoteWorksResponsePool;
  private RemoteInterpreterServer server;

  public ZeppelinRemoteWorksController(
      RemoteInterpreterServer server, Map<String, Object> remoteWorksResponsePool) {
    this.remoteWorksResponsePool = remoteWorksResponsePool;
    this.server = server;
  }

  /**
   * Generates a correlation key for one request/response round trip.
   * NOTE(review): hashCode of a millisecond timestamp string is collision-prone
   * for concurrent requests within the same millisecond — consider a UUID.
   */
  public String generateOwnerKey() {
    // BUGFIX(idiom): dropped the redundant `new String(...)` wrapper.
    return String.valueOf(("ownerKey" + System.currentTimeMillis()).hashCode());
  }

  public boolean waitForEvent(String eventOwnerKey) throws InterruptedException {
    return waitForEvent(eventOwnerKey, DEFAULT_TIMEOUT_VALUE);
  }

  /**
   * Polls the response pool every 500 ms until the key appears or the timeout
   * elapses.
   *
   * @return true when a response arrived within the timeout
   */
  public boolean waitForEvent(String eventOwnerKey, long timeout) throws InterruptedException {
    long endTime = System.currentTimeMillis() + timeout;
    boolean responseArrived = false;
    while (System.currentTimeMillis() <= endTime) {
      synchronized (this.remoteWorksResponsePool) {
        responseArrived = this.remoteWorksResponsePool.containsKey(eventOwnerKey);
      }
      if (responseArrived) {
        break;
      }
      sleep(500);
    }
    return responseArrived;
  }

  @Override
  public List<InterpreterContextRunner> getRemoteContextRunner(String noteId) {
    return getRemoteContextRunner(noteId, null);
  }

  /**
   * Requests the paragraph runners of a note (optionally one paragraph) from
   * the Zeppelin server and blocks until they arrive.
   *
   * @return the runners, or an empty list on timeout/interruption or when the
   *         server returned nothing (previously this could return null)
   */
  @SuppressWarnings("unchecked")
  public List<InterpreterContextRunner> getRemoteContextRunner(
      String noteId, String paragraphID) {
    String ownerKey = generateOwnerKey();
    ZeppelinServerResourceParagraphRunner resource = new ZeppelinServerResourceParagraphRunner();
    resource.setNoteId(noteId);
    resource.setParagraphId(paragraphID);
    server.eventClient.getZeppelinServerNoteRunner(ownerKey, resource);
    try {
      this.waitForEvent(ownerKey);
    } catch (Exception e) {
      // BUGFIX: the failure was previously swallowed silently.
      logger.warn("Failed waiting for paragraph runners of note {}", noteId, e);
      return new LinkedList<>();
    }
    List<InterpreterContextRunner> runners;
    synchronized (this.remoteWorksResponsePool) {
      runners = (List<InterpreterContextRunner>) this.remoteWorksResponsePool.get(ownerKey);
      this.remoteWorksResponsePool.remove(ownerKey);
    }
    // BUGFIX(robustness): never hand a null list to callers.
    return runners == null ? new LinkedList<InterpreterContextRunner>() : runners;
  }
}
/**
 * Converts a local InterpreterResult (plus paragraph config and GUI state)
 * into its thrift transport representation; config and GUI travel as JSON.
 */
private RemoteInterpreterResult convert(InterpreterResult result,
                                        Map<String, Object> config, GUI gui) {
  // Mirror each result message into its thrift counterpart.
  List<RemoteInterpreterResultMessage> messages = new LinkedList<>();
  for (InterpreterResultMessage message : result.message()) {
    messages.add(
        new RemoteInterpreterResultMessage(message.getType().name(), message.getData()));
  }
  return new RemoteInterpreterResult(
      result.code().name(), messages, gson.toJson(config), gson.toJson(gui));
}
/**
 * Looks up the status of a job by id across every interpreter of the given
 * session, scanning running jobs before waiting ones.
 *
 * @return the job's status name, or "Unknown" when the group/session/job
 *         cannot be found
 */
@Override
public String getStatus(String sessionKey, String jobId)
    throws TException {
  if (interpreterGroup == null) {
    return "Unknown";
  }
  synchronized (interpreterGroup) {
    List<Interpreter> interpreters = interpreterGroup.get(sessionKey);
    if (interpreters == null) {
      return "Unknown";
    }
    for (Interpreter intp : interpreters) {
      // Running jobs are checked first, then waiting jobs — same order as before.
      List<Job> candidates = new LinkedList<>();
      candidates.addAll(intp.getScheduler().getJobsRunning());
      candidates.addAll(intp.getScheduler().getJobsWaiting());
      for (Job job : candidates) {
        if (jobId.equals(job.getId())) {
          return job.getStatus().name();
        }
      }
    }
  }
  return "Unknown";
}
// Registry-change callbacks: forward local angular-object changes to the
// Zeppelin server through the event client.
@Override
public void onAdd(String interpreterGroupId, AngularObject object) {
  eventClient.angularObjectAdd(object);
}

@Override
public void onUpdate(String interpreterGroupId, AngularObject object) {
  eventClient.angularObjectUpdate(object);
}

@Override
public void onRemove(String interpreterGroupId, String name, String noteId, String paragraphId) {
  eventClient.angularObjectRemove(name, noteId, paragraphId);
}

/**
 * Poll event from RemoteInterpreterEventPoller.
 *
 * @return the next pending event from this process's event queue
 * @throws TException on thrift transport errors
 */
@Override
public RemoteInterpreterEvent getEvent() throws TException {
  return eventClient.pollEvent();
}
/**
 * Called when an angular object is updated on the client (web) side; applies
 * the new value to the matching local object without emitting a change event
 * back. Deserialization is attempted in order: the previous value's concrete
 * type, then a generic JSON map, then a plain string.
 *
 * @param name angular object name
 * @param noteId noteId where the update issues
 * @param paragraphId paragraphId where the update issues
 * @param object new value serialized as JSON
 * @throws TException on thrift transport errors
 */
@Override
public void angularObjectUpdate(String name, String noteId, String paragraphId, String object)
    throws TException {
  AngularObjectRegistry registry = interpreterGroup.getAngularObjectRegistry();
  // first try local objects
  AngularObject ao = registry.get(name, noteId, paragraphId);
  if (ao == null) {
    logger.debug("Angular object {} not exists", name);
    return;
  }
  if (object == null) {
    // Explicit null update; `false` = do not emit an event back.
    ao.set(null, false);
    return;
  }
  Object oldObject = ao.get();
  Object value = null;
  if (oldObject != null) { // first try with previous object's type
    try {
      value = gson.fromJson(object, oldObject.getClass());
      ao.set(value, false);
      return;
    } catch (Exception e) {
      // it's not a previous object's type. proceed to treat as a generic type
      logger.debug(e.getMessage(), e);
    }
  }
  // Generic java object type for json.
  if (value == null) {
    try {
      value = gson.fromJson(object,
          new TypeToken<Map<String, Object>>() {
          }.getType());
    } catch (Exception e) {
      // it's not a generic json object either; fall back to a string type
      logger.debug(e.getMessage(), e);
    }
  }
  // try string object type at last
  if (value == null) {
    value = gson.fromJson(object, String.class);
  }
  ao.set(value, false);
}
/**
 * Called when the Zeppelin server initiates an angular object add; no event
 * is emitted back. If an object with the same name already exists locally,
 * this delegates to the update path instead.
 */
@Override
public void angularObjectAdd(String name, String noteId, String paragraphId, String object)
    throws TException {
  AngularObjectRegistry registry = interpreterGroup.getAngularObjectRegistry();
  // first try local objects
  AngularObject existing = registry.get(name, noteId, paragraphId);
  if (existing != null) {
    angularObjectUpdate(name, noteId, paragraphId, object);
    return;
  }
  // Deserialize: first as a generic JSON map, then fall back to a plain string.
  Object value = null;
  try {
    value = gson.fromJson(object,
        new TypeToken<Map<String, Object>>() {
        }.getType());
  } catch (Exception e) {
    // it's okay. proceed to treat object as a string
    logger.debug(e.getMessage(), e);
  }
  if (value == null) {
    value = gson.fromJson(object, String.class);
  }
  registry.add(name, value, noteId, paragraphId, false);
}
@Override
public void angularObjectRemove(String name, String noteId, String paragraphId) throws
    TException {
  // Remove locally only; `false` = do not emit a remove event back to the server.
  AngularObjectRegistry registry = interpreterGroup.getAngularObjectRegistry();
  registry.remove(name, noteId, paragraphId, false);
}

// Delivers the server's answer to a previous "get all resources" request.
@Override
public void resourcePoolResponseGetAll(List<String> resources) throws TException {
  eventClient.putResponseGetAllResources(resources);
}

/**
 * Delivers the payload of a resource fetched from remote.
 * @param resourceId json serialized ResourceId
 * @param object java serialized of the object
 * @throws TException on thrift transport errors
 */
@Override
public void resourceResponseGet(String resourceId, ByteBuffer object) throws TException {
  eventClient.putResponseGetResource(resourceId, object);
}
/**
 * Returns every resource of the local pool, each serialized to JSON.
 * An empty list is returned when no pool is configured.
 */
@Override
public List<String> resourcePoolGetAll() throws TException {
  logger.debug("Request getAll from ZeppelinServer");
  List<String> serialized = new LinkedList<>();
  if (resourcePool != null) {
    Gson gson = new Gson();
    // `false` — do not forward the lookup to remote pools.
    for (Resource resource : resourcePool.getAll(false)) {
      serialized.add(gson.toJson(resource));
    }
  }
  return serialized;
}
/**
 * Removes a resource from the local pool.
 *
 * @return true when a resource with that name existed and was removed
 */
@Override
public boolean resourceRemove(String noteId, String paragraphId, String resourceName)
    throws TException {
  Resource resource = resourcePool.remove(noteId, paragraphId, resourceName);
  return resource != null;
}
/**
 * Serializes a local resource's value for transport to the Zeppelin server.
 * An empty buffer signals "absent or not serializable".
 */
@Override
public ByteBuffer resourceGet(String noteId, String paragraphId, String resourceName)
    throws TException {
  logger.debug("Request resourceGet {} from ZeppelinServer", resourceName);
  Resource resource = resourcePool.get(noteId, paragraphId, resourceName, false);
  boolean usable =
      resource != null && resource.get() != null && resource.isSerializable();
  if (!usable) {
    return ByteBuffer.allocate(0);
  }
  try {
    return Resource.serializeObject(resource.get());
  } catch (IOException e) {
    logger.error(e.getMessage(), e);
    return ByteBuffer.allocate(0);
  }
}
/**
 * Replaces the whole local angular object registry with one pushed from the
 * Zeppelin server (JSON map of noteId -> name -> object).
 */
@Override
public void angularRegistryPush(String registryAsString) throws TException {
  try {
    Map<String, Map<String, AngularObject>> deserializedRegistry = gson
        .fromJson(registryAsString,
            new TypeToken<Map<String, Map<String, AngularObject>>>() { }.getType());
    interpreterGroup.getAngularObjectRegistry().setRegistry(deserializedRegistry);
  } catch (Exception e) {
    // Best effort: a malformed registry must not take down the process.
    logger.info("Exception in RemoteInterpreterServer while angularRegistryPush, nolock", e);
  }
}

/**
 * Creates an InterpreterOutput whose append/update events are forwarded to
 * the Zeppelin server as application output for the given app instance.
 */
protected InterpreterOutput createAppOutput(final String noteId,
    final String paragraphId,
    final String appId) {
  return new InterpreterOutput(new InterpreterOutputListener() {
    @Override
    public void onUpdateAll(InterpreterOutput out) {
      // No-op: whole-output updates are not forwarded for applications.
    }

    @Override
    public void onAppend(int index, InterpreterResultMessageOutput out, byte[] line) {
      eventClient.onAppOutputAppend(noteId, paragraphId, index, appId, new String(line));
    }

    @Override
    public void onUpdate(int index, InterpreterResultMessageOutput out) {
      try {
        eventClient.onAppOutputUpdate(noteId, paragraphId, index, appId,
            out.getType(), new String(out.toByteArray()));
      } catch (IOException e) {
        logger.error(e.getMessage(), e);
      }
    }
  });
}
private ApplicationContext getApplicationContext(
HeliumPackage packageInfo, String noteId, String paragraphId, String applicationInstanceId) {
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | true |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreter.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.remote;
import java.util.*;
import org.apache.thrift.TException;
import org.apache.zeppelin.display.AngularObject;
import org.apache.zeppelin.display.AngularObjectRegistry;
import org.apache.zeppelin.display.GUI;
import org.apache.zeppelin.helium.ApplicationEventListener;
import org.apache.zeppelin.display.Input;
import org.apache.zeppelin.interpreter.*;
import org.apache.zeppelin.interpreter.InterpreterResult.Type;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterContext;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterResult;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterResultMessage;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
import org.apache.zeppelin.scheduler.Scheduler;
import org.apache.zeppelin.scheduler.SchedulerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
/**
* Proxy for Interpreter instance that runs on separate process
*/
public class RemoteInterpreter extends Interpreter {
private static final Logger logger = LoggerFactory.getLogger(RemoteInterpreter.class);
private final RemoteInterpreterProcessListener remoteInterpreterProcessListener;
private final ApplicationEventListener applicationEventListener;
private Gson gson = new Gson();
private String interpreterRunner;
private String interpreterPath;
private String localRepoPath;
private String className;
private String sessionKey;
private FormType formType;
private boolean initialized;
private Map<String, String> env;
private int connectTimeout;
private int maxPoolSize;
private String host;
private int port;
private String userName;
private Boolean isUserImpersonate;
private int outputLimit = Constants.ZEPPELIN_INTERPRETER_OUTPUT_LIMIT;
/**
 * Remote interpreter that launches and manages its own interpreter process
 * (via the given runner script) on first use.
 */
public RemoteInterpreter(Properties property, String sessionKey, String className,
    String interpreterRunner, String interpreterPath, String localRepoPath, int connectTimeout,
    int maxPoolSize, RemoteInterpreterProcessListener remoteInterpreterProcessListener,
    ApplicationEventListener appListener, String userName, Boolean isUserImpersonate,
    int outputLimit) {
  super(property);
  this.sessionKey = sessionKey;
  this.className = className;
  initialized = false;
  this.interpreterRunner = interpreterRunner;
  this.interpreterPath = interpreterPath;
  this.localRepoPath = localRepoPath;
  // Environment for the spawned process: properties whose keys look like
  // environment-variable names (see isEnvString).
  env = getEnvFromInterpreterProperty(property);
  this.connectTimeout = connectTimeout;
  this.maxPoolSize = maxPoolSize;
  this.remoteInterpreterProcessListener = remoteInterpreterProcessListener;
  this.applicationEventListener = appListener;
  this.userName = userName;
  this.isUserImpersonate = isUserImpersonate;
  this.outputLimit = outputLimit;
}

/**
 * Connect to an existing interpreter process at host:port instead of
 * spawning one.
 */
public RemoteInterpreter(Properties property, String sessionKey, String className, String host,
    int port, String localRepoPath, int connectTimeout, int maxPoolSize,
    RemoteInterpreterProcessListener remoteInterpreterProcessListener,
    ApplicationEventListener appListener, String userName, Boolean isUserImpersonate,
    int outputLimit) {
  super(property);
  this.sessionKey = sessionKey;
  this.className = className;
  initialized = false;
  this.host = host;
  this.port = port;
  this.localRepoPath = localRepoPath;
  this.connectTimeout = connectTimeout;
  this.maxPoolSize = maxPoolSize;
  this.remoteInterpreterProcessListener = remoteInterpreterProcessListener;
  this.applicationEventListener = appListener;
  this.userName = userName;
  this.isUserImpersonate = isUserImpersonate;
  this.outputLimit = outputLimit;
}

// VisibleForTesting
public RemoteInterpreter(Properties property, String sessionKey, String className,
    String interpreterRunner, String interpreterPath, String localRepoPath,
    Map<String, String> env, int connectTimeout,
    RemoteInterpreterProcessListener remoteInterpreterProcessListener,
    ApplicationEventListener appListener, String userName, Boolean isUserImpersonate) {
  super(property);
  this.className = className;
  this.sessionKey = sessionKey;
  this.interpreterRunner = interpreterRunner;
  this.interpreterPath = interpreterPath;
  this.localRepoPath = localRepoPath;
  // Caller-supplied env is augmented (and possibly overridden) by env-style
  // entries found in the interpreter properties.
  env.putAll(getEnvFromInterpreterProperty(property));
  this.env = env;
  this.connectTimeout = connectTimeout;
  this.maxPoolSize = 10;
  this.remoteInterpreterProcessListener = remoteInterpreterProcessListener;
  this.applicationEventListener = appListener;
  this.userName = userName;
  this.isUserImpersonate = isUserImpersonate;
}
/**
 * Extracts every interpreter property whose key looks like an environment
 * variable name, to be exported to the spawned interpreter process.
 */
private Map<String, String> getEnvFromInterpreterProperty(Properties property) {
  Map<String, String> env = new HashMap<>();
  for (Object rawKey : property.keySet()) {
    String key = (String) rawKey;
    if (isEnvString(key)) {
      env.put(key, property.getProperty(key));
    }
  }
  return env;
}
/**
 * Whether a property key looks like an environment-variable name: non-null,
 * non-empty, and made only of upper-case letters, digits and underscores.
 * (String.matches anchors the whole string, so no partial matches.)
 */
static boolean isEnvString(String key) {
  return key != null && !key.isEmpty() && key.matches("^[A-Z_0-9]*");
}
@Override
public String getClassName() {
  return className;
}

// True when this proxy was configured with an existing host/port (second
// constructor) rather than a runner script.
private boolean connectToExistingProcess() {
  return host != null && port > 0;
}

/**
 * Returns the process handle shared by this interpreter group, lazily
 * creating it on first call: either a connection to an already-running
 * process or a managed process to be spawned later.
 */
public RemoteInterpreterProcess getInterpreterProcess() {
  InterpreterGroup intpGroup = getInterpreterGroup();
  if (intpGroup == null) {
    return null;
  }
  synchronized (intpGroup) {
    if (intpGroup.getRemoteInterpreterProcess() == null) {
      RemoteInterpreterProcess remoteProcess;
      if (connectToExistingProcess()) {
        remoteProcess = new RemoteInterpreterRunningProcess(
            connectTimeout,
            remoteInterpreterProcessListener,
            applicationEventListener,
            host,
            port);
      } else {
        // create new remote process
        remoteProcess = new RemoteInterpreterManagedProcess(
            interpreterRunner, interpreterPath, localRepoPath, env, connectTimeout,
            remoteInterpreterProcessListener, applicationEventListener);
      }
      intpGroup.setRemoteInterpreterProcess(remoteProcess);
    }
    return intpGroup.getRemoteInterpreterProcess();
  }
}
/**
 * Creates this interpreter's counterpart inside the remote process (once) and
 * pushes the local angular object registry to it on the first initialization
 * of the group.
 *
 * @throws InterpreterException when the client cannot be obtained or the
 *         remote createInterpreter call fails
 */
public synchronized void init() {
  if (initialized) {
    return;
  }
  RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
  final InterpreterGroup interpreterGroup = getInterpreterGroup();
  interpreterProcess.setMaxPoolSize(
      Math.max(this.maxPoolSize, interpreterProcess.getMaxPoolSize()));
  String groupId = interpreterGroup.getId();
  synchronized (interpreterProcess) {
    Client client = null;
    try {
      client = interpreterProcess.getClient();
    } catch (Exception e1) {
      throw new InterpreterException(e1);
    }
    boolean broken = false;
    try {
      logger.info("Create remote interpreter {}", getClassName());
      if (localRepoPath != null) {
        property.put("zeppelin.interpreter.localRepo", localRepoPath);
      }
      property.put("zeppelin.interpreter.output.limit", Integer.toString(outputLimit));
      client.createInterpreter(groupId, sessionKey,
          getClassName(), (Map) property, userName);
      // Push angular object loaded from JSON file to remote interpreter
      if (!interpreterGroup.isAngularRegistryPushed()) {
        pushAngularObjectRegistryToRemote(client);
        interpreterGroup.setAngularRegistryPushed(true);
      }
    } catch (TException e) {
      // BUGFIX: mark the client broken so it is invalidated rather than
      // returned to the pool as healthy — matches interpret()/cancel()/etc.
      broken = true;
      logger.error("Failed to create interpreter: {}", getClassName());
      throw new InterpreterException(e);
    } finally {
      // TODO(jongyoul): Fixed it when not all of interpreter in same interpreter group are broken
      interpreterProcess.releaseClient(client, broken);
    }
  }
  initialized = true;
}
/**
 * Opens this interpreter: references the shared remote process for this
 * session (starting it if needed) and initializes every interpreter of the
 * session, dropping any that fail to initialize.
 */
@Override
public void open() {
  InterpreterGroup interpreterGroup = getInterpreterGroup();
  synchronized (interpreterGroup) {
    // initialize all interpreters in this interpreter group
    List<Interpreter> interpreters = interpreterGroup.get(sessionKey);
    // TODO(jl): this open method is called by LazyOpenInterpreter.open(). It, however,
    // initializes all of interpreters with same sessionKey. But LazyOpenInterpreter assumes if it
    // doesn't call open method, it's not open. It causes problem while running intp.close()
    // In case of Spark, this method initializes all of interpreters and init() method increases
    // reference count of RemoteInterpreterProcess. But while closing this interpreter group, all
    // other interpreters doesn't do anything because those LazyInterpreters aren't open.
    // But for now, we have to initialise all of interpreters for some reasons.
    // See Interpreter.getInterpreterInTheSameSessionByClassName(String)
    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
    if (!initialized) {
      // reference per session
      interpreterProcess.reference(interpreterGroup, userName, isUserImpersonate);
    }
    // Copy of the list so removals below don't break iteration; unwrap any
    // WrappedInterpreter layers before calling init().
    for (Interpreter intp : new ArrayList<>(interpreters)) {
      Interpreter p = intp;
      while (p instanceof WrappedInterpreter) {
        p = ((WrappedInterpreter) p).getInnerInterpreter();
      }
      try {
        ((RemoteInterpreter) p).init();
      } catch (InterpreterException e) {
        logger.error("Failed to initialize interpreter: {}. Remove it from interpreterGroup",
            p.getClassName());
        interpreters.remove(p);
      }
    }
  }
}
/**
 * Closes every interpreter of this session and, if this proxy had been
 * initialized, drops one reference on the shared remote process (which stops
 * the process when the count reaches zero).
 */
@Override
public void close() {
  InterpreterGroup interpreterGroup = getInterpreterGroup();
  synchronized (interpreterGroup) {
    // close all interpreters in this session
    List<Interpreter> interpreters = interpreterGroup.get(sessionKey);
    // TODO(jl): see the matching note in open(): LazyOpenInterpreter only
    // closes interpreters it opened itself, so all session members must be
    // closed here, mirroring the initialization done in open().
    if (initialized) {
      // dereference per session
      getInterpreterProcess().dereference();
    }
    // Copy so removals below don't break iteration; unwrap wrapper layers.
    for (Interpreter intp : new ArrayList<>(interpreters)) {
      Interpreter p = intp;
      while (p instanceof WrappedInterpreter) {
        p = ((WrappedInterpreter) p).getInnerInterpreter();
      }
      try {
        ((RemoteInterpreter) p).closeInterpreter();
      } catch (InterpreterException e) {
        // BUGFIX: this message previously said "Failed to initialize
        // interpreter", copied from open(); this is the close path.
        logger.error("Failed to close interpreter: {}. Remove it from interpreterGroup",
            p.getClassName());
        interpreters.remove(p);
      }
    }
  }
}
/**
 * Closes this interpreter's remote counterpart (client.close) if it was
 * initialized; always leaves this proxy marked uninitialized afterwards.
 */
public void closeInterpreter() {
  if (this.initialized == false) {
    return;
  }
  RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
  Client client = null;
  boolean broken = false;
  try {
    client = interpreterProcess.getClient();
    if (client != null) {
      client.close(sessionKey, className);
    }
  } catch (TException e) {
    // Transport failure: the pooled client is invalidated via `broken`.
    broken = true;
    throw new InterpreterException(e);
  } catch (Exception e1) {
    throw new InterpreterException(e1);
  } finally {
    if (client != null) {
      interpreterProcess.releaseClient(client, broken);
    }
    this.initialized = false;
  }
}

/**
 * Runs the paragraph text remotely and converts the thrift result back,
 * replacing the local paragraph config and merging (or replacing, for NATIVE
 * forms) the GUI state returned by the remote side.
 */
@Override
public InterpreterResult interpret(String st, InterpreterContext context) {
  if (logger.isDebugEnabled()) {
    logger.debug("st:\n{}", st);
  }
  // NOTE: getFormType() calls open() and therefore may trigger initialization.
  FormType form = getFormType();
  RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
  Client client = null;
  try {
    client = interpreterProcess.getClient();
  } catch (Exception e1) {
    throw new InterpreterException(e1);
  }
  // Register this context's paragraph runners so remote callbacks can run
  // paragraphs of the same note.
  InterpreterContextRunnerPool interpreterContextRunnerPool = interpreterProcess
      .getInterpreterContextRunnerPool();
  List<InterpreterContextRunner> runners = context.getRunners();
  if (runners != null && runners.size() != 0) {
    // assume all runners in this InterpreterContext have the same note id
    String noteId = runners.get(0).getNoteId();
    interpreterContextRunnerPool.clear(noteId);
    interpreterContextRunnerPool.addAll(noteId, runners);
  }
  boolean broken = false;
  try {
    final GUI currentGUI = context.getGui();
    RemoteInterpreterResult remoteResult = client.interpret(
        sessionKey, className, st, convert(context));
    // Remote paragraph config replaces the local one wholesale.
    Map<String, Object> remoteConfig = (Map<String, Object>) gson.fromJson(
        remoteResult.getConfig(), new TypeToken<Map<String, Object>>() {
        }.getType());
    context.getConfig().clear();
    context.getConfig().putAll(remoteConfig);
    if (form == FormType.NATIVE) {
      // NATIVE forms: the remote GUI fully replaces the local one.
      GUI remoteGui = gson.fromJson(remoteResult.getGui(), GUI.class);
      currentGUI.clear();
      currentGUI.setParams(remoteGui.getParams());
      currentGUI.setForms(remoteGui.getForms());
    } else if (form == FormType.SIMPLE) {
      // SIMPLE forms: remote entries are merged over the local ones.
      final Map<String, Input> currentForms = currentGUI.getForms();
      final Map<String, Object> currentParams = currentGUI.getParams();
      final GUI remoteGUI = gson.fromJson(remoteResult.getGui(), GUI.class);
      final Map<String, Input> remoteForms = remoteGUI.getForms();
      final Map<String, Object> remoteParams = remoteGUI.getParams();
      currentForms.putAll(remoteForms);
      currentParams.putAll(remoteParams);
    }
    InterpreterResult result = convert(remoteResult);
    return result;
  } catch (TException e) {
    // Transport failure: invalidate the pooled client.
    broken = true;
    throw new InterpreterException(e);
  } finally {
    interpreterProcess.releaseClient(client, broken);
  }
}
/**
 * Cancels the remote execution associated with the given context.
 */
@Override
public void cancel(InterpreterContext context) {
  RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
  Client client = null;
  try {
    client = interpreterProcess.getClient();
  } catch (Exception e1) {
    throw new InterpreterException(e1);
  }
  boolean broken = false;
  try {
    client.cancel(sessionKey, className, convert(context));
  } catch (TException e) {
    // Transport failure: invalidate the pooled client.
    broken = true;
    throw new InterpreterException(e);
  } finally {
    interpreterProcess.releaseClient(client, broken);
  }
}

/**
 * Returns the interpreter's form type, fetched from the remote process once
 * and cached. Note this calls open() first and so may trigger process start
 * and initialization.
 */
@Override
public FormType getFormType() {
  open();
  if (formType != null) {
    return formType;
  }
  RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
  Client client = null;
  try {
    client = interpreterProcess.getClient();
  } catch (Exception e1) {
    throw new InterpreterException(e1);
  }
  boolean broken = false;
  try {
    formType = FormType.valueOf(client.getFormType(sessionKey, className));
    return formType;
  } catch (TException e) {
    broken = true;
    throw new InterpreterException(e);
  } finally {
    interpreterProcess.releaseClient(client, broken);
  }
}

/**
 * Returns remote execution progress; 0 when the process isn't running.
 */
@Override
public int getProgress(InterpreterContext context) {
  RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
  if (interpreterProcess == null || !interpreterProcess.isRunning()) {
    return 0;
  }
  Client client = null;
  try {
    client = interpreterProcess.getClient();
  } catch (Exception e1) {
    throw new InterpreterException(e1);
  }
  boolean broken = false;
  try {
    return client.getProgress(sessionKey, className, convert(context));
  } catch (TException e) {
    broken = true;
    throw new InterpreterException(e);
  } finally {
    interpreterProcess.releaseClient(client, broken);
  }
}
/**
 * Requests code-completion candidates from the remote interpreter.
 *
 * @param buf    the buffer being edited
 * @param cursor cursor position within {@code buf}
 * @return completion candidates supplied by the remote process
 */
@Override
public List<InterpreterCompletion> completion(String buf, int cursor) {
  RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
  Client client = null;
  try {
    client = interpreterProcess.getClient();
  } catch (Exception e1) {
    throw new InterpreterException(e1);
  }
  boolean broken = false;
  try {
    // FIX: typed instead of the raw List used previously — the thrift client
    // already returns List<InterpreterCompletion>, so no unchecked conversion.
    List<InterpreterCompletion> completion =
        client.completion(sessionKey, className, buf, cursor);
    return completion;
  } catch (TException e) {
    broken = true;
    throw new InterpreterException(e);
  } finally {
    interpreterProcess.releaseClient(client, broken);
  }
}
/**
 * Returns the remote scheduler for this session/process pair (one scheduler
 * per pair, keyed by class name + session + process identity), or null when
 * there is no process. Concurrency is capped by the configured pool size.
 */
@Override
public Scheduler getScheduler() {
  int maxConcurrency = maxPoolSize;
  RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
  if (interpreterProcess == null) {
    return null;
  } else {
    return SchedulerFactory.singleton().createOrGetRemoteScheduler(
        RemoteInterpreter.class.getName() + sessionKey + interpreterProcess.hashCode(),
        sessionKey, interpreterProcess, maxConcurrency);
  }
}

// NOTE(review): appears unused within the visible code; kept as-is.
private String getInterpreterGroupKey(InterpreterGroup interpreterGroup) {
  return interpreterGroup.getId();
}

// Serializes an InterpreterContext into its thrift form; complex members
// (auth info, config, GUI, runners) travel as JSON strings.
private RemoteInterpreterContext convert(InterpreterContext ic) {
  return new RemoteInterpreterContext(ic.getNoteId(), ic.getParagraphId(), ic.getReplName(),
      ic.getParagraphTitle(), ic.getParagraphText(), gson.toJson(ic.getAuthenticationInfo()),
      gson.toJson(ic.getConfig()), gson.toJson(ic.getGui()), gson.toJson(ic.getRunners()));
}
/**
 * Rebuilds a local InterpreterResult from its thrift form, message by message.
 */
private InterpreterResult convert(RemoteInterpreterResult result) {
  InterpreterResult converted = new InterpreterResult(
      InterpreterResult.Code.valueOf(result.getCode()));
  for (RemoteInterpreterResultMessage message : result.getMsg()) {
    converted.add(Type.valueOf(message.getType()), message.getData());
  }
  return converted;
}
/**
 * Push local angular object registry to
 * remote interpreter. This method should be
 * called ONLY inside the init() method.
 */
void pushAngularObjectRegistryToRemote(Client client) throws TException {
  final AngularObjectRegistry angularObjectRegistry = this.getInterpreterGroup()
      .getAngularObjectRegistry();
  if (angularObjectRegistry != null && angularObjectRegistry.getRegistry() != null) {
    final Map<String, Map<String, AngularObject>> registry = angularObjectRegistry
        .getRegistry();
    logger.info("Push local angular object registry from ZeppelinServer to" +
        " remote interpreter group {}", this.getInterpreterGroup().getId());
    final java.lang.reflect.Type registryType = new TypeToken<Map<String,
        Map<String, AngularObject>>>() {
    }.getType();
    Gson gson = new Gson();
    client.angularRegistryPush(gson.toJson(registry, registryType));
  }
}

// Environment exported to the managed interpreter process.
public Map<String, String> getEnv() {
  return env;
}

public void setEnv(Map<String, String> env) {
  this.env = env;
}

// Merges entries into the current environment, creating the map if needed.
public void addEnv(Map<String, String> env) {
  if (this.env == null) {
    this.env = new HashMap<>();
  }
  this.env.putAll(env);
}

// Only for test
public String getInterpreterRunner() {
  return interpreterRunner;
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterProcessListener.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterProcessListener.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.remote;
import org.apache.zeppelin.interpreter.InterpreterResult;
import java.util.Map;
/**
* Event from remoteInterpreterProcess
*/
/**
 * Callback interface for events originating from a remote interpreter
 * process (output streaming, meta info, remote paragraph runs).
 */
public interface RemoteInterpreterProcessListener {
  // NOTE: redundant `public` modifiers removed — interface members are
  // implicitly public; `ParagraphID` renamed to conventional `paragraphId`.
  void onOutputAppend(String noteId, String paragraphId, int index, String output);

  void onOutputUpdated(
      String noteId, String paragraphId, int index, InterpreterResult.Type type, String output);

  void onOutputClear(String noteId, String paragraphId);

  void onMetaInfosReceived(String settingId, Map<String, String> metaInfos);

  void onRemoteRunParagraph(String noteId, String paragraphId) throws Exception;

  void onGetParagraphRunners(
      String noteId, String paragraphId, RemoteWorksEventListener callback);

  /**
   * Remote works for Interpreter callback listener.
   */
  interface RemoteWorksEventListener {
    void onFinished(Object resultObject);

    void onError();
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterProcess.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterProcess.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.remote;
import com.google.gson.Gson;
import org.apache.commons.pool2.impl.GenericObjectPool;
import org.apache.thrift.TException;
import org.apache.zeppelin.helium.ApplicationEventListener;
import org.apache.zeppelin.interpreter.InterpreterGroup;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Abstract class for interpreter process
*/
public abstract class RemoteInterpreterProcess {
private static final Logger logger = LoggerFactory.getLogger(RemoteInterpreterProcess.class);
// number of sessions that are attached to this process
private final AtomicInteger referenceCount;
private GenericObjectPool<Client> clientPool;
private final RemoteInterpreterEventPoller remoteInterpreterEventPoller;
private final InterpreterContextRunnerPool interpreterContextRunnerPool;
private int connectTimeout;
public RemoteInterpreterProcess(
    int connectTimeout,
    RemoteInterpreterProcessListener listener,
    ApplicationEventListener appListener) {
  // Delegates to the package-private constructor with a default event poller.
  this(new RemoteInterpreterEventPoller(listener, appListener),
      connectTimeout);
}

RemoteInterpreterProcess(RemoteInterpreterEventPoller remoteInterpreterEventPoller,
    int connectTimeout) {
  this.interpreterContextRunnerPool = new InterpreterContextRunnerPool();
  referenceCount = new AtomicInteger(0);
  this.remoteInterpreterEventPoller = remoteInterpreterEventPoller;
  this.connectTimeout = connectTimeout;
}

// Lifecycle hooks implemented by the managed / already-running subclasses.
public abstract String getHost();

public abstract int getPort();

public abstract void start(String userName, Boolean isUserImpersonate);

public abstract void stop();

public abstract boolean isRunning();

public int getConnectTimeout() {
  return connectTimeout;
}

/**
 * Attaches one more session to this process: on first use starts the process,
 * creates the thrift client pool and the event poller. Returns the new
 * reference count.
 */
public int reference(InterpreterGroup interpreterGroup, String userName,
    Boolean isUserImpersonate) {
  synchronized (referenceCount) {
    if (!isRunning()) {
      start(userName, isUserImpersonate);
    }
    if (clientPool == null) {
      clientPool = new GenericObjectPool<>(new ClientFactory(getHost(), getPort()));
      // Validate pooled clients on borrow so dead connections are discarded.
      clientPool.setTestOnBorrow(true);
      remoteInterpreterEventPoller.setInterpreterGroup(interpreterGroup);
      remoteInterpreterEventPoller.setInterpreterProcess(this);
      remoteInterpreterEventPoller.start();
    }
    return referenceCount.incrementAndGet();
  }
}

/**
 * Borrows a thrift client from the pool; null when the pool is not up.
 * Callers must return it via releaseClient/releaseBrokenClient.
 */
public Client getClient() throws Exception {
  if (clientPool == null || clientPool.isClosed()) {
    return null;
  }
  return clientPool.borrowObject();
}

public void releaseClient(Client client) {
  releaseClient(client, false);
}

// Returns a client to the pool; a broken client is invalidated instead.
public void releaseClient(Client client, boolean broken) {
  if (broken) {
    releaseBrokenClient(client);
  } else {
    try {
      clientPool.returnObject(client);
    } catch (Exception e) {
      logger.warn("exception occurred during releasing thrift client", e);
    }
  }
}

public void releaseBrokenClient(Client client) {
  try {
    clientPool.invalidateObject(client);
  } catch (Exception e) {
    logger.warn("exception occurred during releasing thrift client", e);
  }
}
public int dereference() {
synchronized (referenceCount) {
int r = referenceCount.decrementAndGet();
if (r == 0) {
logger.info("shutdown interpreter process");
remoteInterpreterEventPoller.shutdown();
// first try shutdown
Client client = null;
try {
client = getClient();
client.shutdown();
} catch (Exception e) {
// safely ignore exception while client.shutdown() may terminates remote process
logger.info("Exception in RemoteInterpreterProcess while synchronized dereference, can " +
"safely ignore exception while client.shutdown() may terminates remote process");
logger.debug(e.getMessage(), e);
} finally {
if (client != null) {
// no longer used
releaseBrokenClient(client);
}
}
clientPool.clear();
clientPool.close();
// wait for some time (connectTimeout) and force kill
// remote process server.serve() loop is not always finishing gracefully
long startTime = System.currentTimeMillis();
while (System.currentTimeMillis() - startTime < connectTimeout) {
if (this.isRunning()) {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
logger.error("Exception in RemoteInterpreterProcess while synchronized dereference " +
"Thread.sleep", e);
}
} else {
break;
}
}
}
return r;
}
}
public int referenceCount() {
synchronized (referenceCount) {
return referenceCount.get();
}
}
public int getNumActiveClient() {
if (clientPool == null) {
return 0;
} else {
return clientPool.getNumActive();
}
}
public int getNumIdleClient() {
if (clientPool == null) {
return 0;
} else {
return clientPool.getNumIdle();
}
}
public void setMaxPoolSize(int size) {
if (clientPool != null) {
//Size + 2 for progress poller , cancel operation
clientPool.setMaxTotal(size + 2);
}
}
public int getMaxPoolSize() {
if (clientPool != null) {
return clientPool.getMaxTotal();
} else {
return 0;
}
}
/**
* Called when angular object is updated in client side to propagate
* change to the remote process
* @param name
* @param o
*/
public void updateRemoteAngularObject(String name, String noteId, String paragraphId, Object o) {
Client client = null;
try {
client = getClient();
} catch (NullPointerException e) {
// remote process not started
logger.info("NullPointerException in RemoteInterpreterProcess while " +
"updateRemoteAngularObject getClient, remote process not started", e);
return;
} catch (Exception e) {
logger.error("Can't update angular object", e);
}
boolean broken = false;
try {
Gson gson = new Gson();
client.angularObjectUpdate(name, noteId, paragraphId, gson.toJson(o));
} catch (TException e) {
broken = true;
logger.error("Can't update angular object", e);
} catch (NullPointerException e) {
logger.error("Remote interpreter process not started", e);
return;
} finally {
if (client != null) {
releaseClient(client, broken);
}
}
}
public InterpreterContextRunnerPool getInterpreterContextRunnerPool() {
return interpreterContextRunnerPool;
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/ClientFactory.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/ClientFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.remote;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.pool2.BasePooledObjectFactory;
import org.apache.commons.pool2.PooledObject;
import org.apache.commons.pool2.impl.DefaultPooledObject;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransportException;
import org.apache.zeppelin.interpreter.InterpreterException;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
/**
 * Commons-pool factory that opens one thrift {@link TSocket} per pooled
 * {@link Client} and closes that socket again when the client is destroyed.
 */
public class ClientFactory extends BasePooledObjectFactory<Client> {
  private String host;
  private int port;
  // Maps each client to the socket backing it so destroyObject can close it.
  // Guarded by synchronizing on the map itself.
  Map<Client, TSocket> clientSocketMap = new HashMap<>();
  public ClientFactory(String host, int port) {
    this.host = host;
    this.port = port;
  }
  /**
   * Opens a new socket to host:port and wraps it in a thrift client.
   *
   * @throws InterpreterException when the transport cannot be opened
   */
  @Override
  public Client create() throws Exception {
    TSocket transport = new TSocket(host, port);
    try {
      transport.open();
    } catch (TTransportException e) {
      throw new InterpreterException(e);
    }
    TProtocol protocol = new TBinaryProtocol(transport);
    Client client = new RemoteInterpreterService.Client(protocol);
    synchronized (clientSocketMap) {
      clientSocketMap.put(client, transport);
    }
    return client;
  }
  @Override
  public PooledObject<Client> wrap(Client client) {
    return new DefaultPooledObject<>(client);
  }
  /** Closes and forgets the socket behind a destroyed client. */
  @Override
  public void destroyObject(PooledObject<Client> p) {
    synchronized (clientSocketMap) {
      // Single remove() instead of containsKey/get/remove: one lookup, same effect.
      TSocket socket = clientSocketMap.remove(p.getObject());
      if (socket != null) {
        socket.close();
      }
    }
  }
  /** A client is valid while its underlying transport is still open. */
  @Override
  public boolean validateObject(PooledObject<Client> p) {
    return p.getObject().getOutputProtocol().getTransport().isOpen();
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterEventClient.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterEventClient.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.remote;
import com.google.gson.Gson;
import org.apache.zeppelin.display.AngularObject;
import org.apache.zeppelin.interpreter.InterpreterContextRunner;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResultMessage;
import org.apache.zeppelin.interpreter.RemoteZeppelinServerResource;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterEvent;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterEventType;
import org.apache.zeppelin.interpreter.thrift.ZeppelinServerResourceParagraphRunner;
import org.apache.zeppelin.resource.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
 * Thread connection ZeppelinServer -> RemoteInterpreterServer does not provide
 * remote method invocation from RemoteInterpreterServer -> ZeppelinServer.
 *
 * This class buffers events on the interpreter side and hands them to the
 * ZeppelinServer-side poller; resource reads block until the matching response
 * is pushed back via the putResponse* methods.
 *
 * RemoteInterpreterEventPoller is counter part in ZeppelinServer
 */
public class RemoteInterpreterEventClient implements ResourcePoolConnector {
  // Fixed: the logger was previously created for RemoteInterpreterEvent.class,
  // which mislabeled every log line emitted by this class.
  private final Logger logger = LoggerFactory.getLogger(RemoteInterpreterEventClient.class);
  // Pending events, drained by pollEvent(); guarded by its own monitor.
  private final List<RemoteInterpreterEvent> eventQueue = new LinkedList<>();
  // Responses for getAllResources(); guarded by its own monitor.
  private final List<ResourceSet> getAllResourceResponse = new LinkedList<>();
  // Responses for readResource(), keyed by resource id; guarded by its own monitor.
  private final Map<ResourceId, Object> getResourceResponse = new HashMap<>();
  private final Gson gson = new Gson();
  /**
   * Requests paragraph runners from the ZeppelinServer.
   * @param eventOwnerKey key identifying the requester
   * @param runner paragraph runner descriptor to send
   */
  public void getZeppelinServerNoteRunner(
      String eventOwnerKey, ZeppelinServerResourceParagraphRunner runner) {
    RemoteZeppelinServerResource eventBody = new RemoteZeppelinServerResource();
    eventBody.setResourceType(RemoteZeppelinServerResource.Type.PARAGRAPH_RUNNERS);
    eventBody.setOwnerKey(eventOwnerKey);
    eventBody.setData(runner);
    sendEvent(new RemoteInterpreterEvent(
        RemoteInterpreterEventType.REMOTE_ZEPPELIN_SERVER_RESOURCE,
        gson.toJson(eventBody)));
  }
  /**
   * Run paragraph
   * @param runner
   */
  public void run(InterpreterContextRunner runner) {
    sendEvent(new RemoteInterpreterEvent(
        RemoteInterpreterEventType.RUN_INTERPRETER_CONTEXT_RUNNER,
        gson.toJson(runner)));
  }
  /**
   * notify new angularObject creation
   * @param object
   */
  public void angularObjectAdd(AngularObject object) {
    sendEvent(new RemoteInterpreterEvent(
        RemoteInterpreterEventType.ANGULAR_OBJECT_ADD, gson.toJson(object)));
  }
  /**
   * notify angularObject update
   */
  public void angularObjectUpdate(AngularObject object) {
    sendEvent(new RemoteInterpreterEvent(
        RemoteInterpreterEventType.ANGULAR_OBJECT_UPDATE, gson.toJson(object)));
  }
  /**
   * notify angularObject removal
   */
  public void angularObjectRemove(String name, String noteId, String paragraphId) {
    Map<String, String> removeObject = new HashMap<>();
    removeObject.put("name", name);
    removeObject.put("noteId", noteId);
    removeObject.put("paragraphId", paragraphId);
    sendEvent(new RemoteInterpreterEvent(
        RemoteInterpreterEventType.ANGULAR_OBJECT_REMOVE, gson.toJson(removeObject)));
  }
  /**
   * Get all resources except for specific resourcePool.
   * Blocks until the poller delivers a response via putResponseGetAllResources().
   * @return the resource set received from the ZeppelinServer
   */
  @Override
  public ResourceSet getAllResources() {
    // request
    sendEvent(new RemoteInterpreterEvent(RemoteInterpreterEventType.RESOURCE_POOL_GET_ALL, null));
    synchronized (getAllResourceResponse) {
      while (getAllResourceResponse.isEmpty()) {
        try {
          getAllResourceResponse.wait();
        } catch (InterruptedException e) {
          logger.warn(e.getMessage(), e);
        }
      }
      ResourceSet resourceSet = getAllResourceResponse.remove(0);
      return resourceSet;
    }
  }
  /**
   * Reads a single resource value from the ZeppelinServer.
   * Blocks until the response for this resourceId arrives via putResponseGetResource().
   */
  @Override
  public Object readResource(ResourceId resourceId) {
    logger.debug("Request Read Resource {} from ZeppelinServer", resourceId.getName());
    synchronized (getResourceResponse) {
      // wait for previous response consumed
      while (getResourceResponse.containsKey(resourceId)) {
        try {
          getResourceResponse.wait();
        } catch (InterruptedException e) {
          logger.warn(e.getMessage(), e);
        }
      }
      // send request — reuse the shared Gson field instead of allocating one per call
      sendEvent(new RemoteInterpreterEvent(
          RemoteInterpreterEventType.RESOURCE_GET,
          gson.toJson(resourceId)));
      // wait for response
      while (!getResourceResponse.containsKey(resourceId)) {
        try {
          getResourceResponse.wait();
        } catch (InterruptedException e) {
          logger.warn(e.getMessage(), e);
        }
      }
      Object o = getResourceResponse.remove(resourceId);
      getResourceResponse.notifyAll();
      return o;
    }
  }
  /**
   * Supposed to call from RemoteInterpreterEventPoller
   */
  public void putResponseGetAllResources(List<String> resources) {
    logger.debug("ResourceSet from ZeppelinServer");
    ResourceSet resourceSet = new ResourceSet();
    for (String res : resources) {
      RemoteResource resource = gson.fromJson(res, RemoteResource.class);
      resource.setResourcePoolConnector(this);
      resourceSet.add(resource);
    }
    synchronized (getAllResourceResponse) {
      getAllResourceResponse.add(resourceSet);
      getAllResourceResponse.notify();
    }
  }
  /**
   * Supposed to call from RemoteInterpreterEventPoller
   * @param resourceId json serialized ResourceId
   * @param object java serialized of the object
   */
  public void putResponseGetResource(String resourceId, ByteBuffer object) {
    ResourceId rid = gson.fromJson(resourceId, ResourceId.class);
    logger.debug("Response resource {} from RemoteInterpreter", rid.getName());
    Object o = null;
    try {
      o = Resource.deserializeObject(object);
    } catch (IOException | ClassNotFoundException e) {
      // Deserialization failure is logged; the waiting reader then receives null.
      logger.error(e.getMessage(), e);
    }
    synchronized (getResourceResponse) {
      getResourceResponse.put(rid, o);
      getResourceResponse.notifyAll();
    }
  }
  /**
   * Supposed to call from RemoteInterpreterEventPoller.
   * Waits up to one second for an event and returns a NO_OP event when none arrived.
   * @return next available event
   */
  public RemoteInterpreterEvent pollEvent() {
    synchronized (eventQueue) {
      if (eventQueue.isEmpty()) {
        try {
          eventQueue.wait(1000);
        } catch (InterruptedException e) {
          // fall through: an empty queue simply yields a NO_OP below
        }
      }
      if (eventQueue.isEmpty()) {
        return new RemoteInterpreterEvent(RemoteInterpreterEventType.NO_OP, "");
      } else {
        RemoteInterpreterEvent event = eventQueue.remove(0);
        logger.debug("Send event {}", event.getType());
        return event;
      }
    }
  }
  /** Queues an OUTPUT_APPEND event for one paragraph output index. */
  public void onInterpreterOutputAppend(
      String noteId, String paragraphId, int outputIndex, String output) {
    Map<String, String> appendOutput = new HashMap<>();
    appendOutput.put("noteId", noteId);
    appendOutput.put("paragraphId", paragraphId);
    appendOutput.put("index", Integer.toString(outputIndex));
    appendOutput.put("data", output);
    sendEvent(new RemoteInterpreterEvent(
        RemoteInterpreterEventType.OUTPUT_APPEND,
        gson.toJson(appendOutput)));
  }
  /** Queues an OUTPUT_UPDATE event replacing one paragraph output index. */
  public void onInterpreterOutputUpdate(
      String noteId, String paragraphId, int outputIndex,
      InterpreterResult.Type type, String output) {
    Map<String, String> appendOutput = new HashMap<>();
    appendOutput.put("noteId", noteId);
    appendOutput.put("paragraphId", paragraphId);
    appendOutput.put("index", Integer.toString(outputIndex));
    appendOutput.put("type", type.name());
    appendOutput.put("data", output);
    sendEvent(new RemoteInterpreterEvent(
        RemoteInterpreterEventType.OUTPUT_UPDATE,
        gson.toJson(appendOutput)));
  }
  /** Queues an OUTPUT_UPDATE_ALL event replacing every output of a paragraph. */
  public void onInterpreterOutputUpdateAll(
      String noteId, String paragraphId, List<InterpreterResultMessage> messages) {
    Map<String, Object> appendOutput = new HashMap<>();
    appendOutput.put("noteId", noteId);
    appendOutput.put("paragraphId", paragraphId);
    appendOutput.put("messages", messages);
    sendEvent(new RemoteInterpreterEvent(
        RemoteInterpreterEventType.OUTPUT_UPDATE_ALL,
        gson.toJson(appendOutput)));
  }
  // Enqueues an event and wakes every thread blocked in pollEvent()/waitForEventQueueBecomesEmpty().
  private void sendEvent(RemoteInterpreterEvent event) {
    synchronized (eventQueue) {
      eventQueue.add(event);
      eventQueue.notifyAll();
    }
  }
  /** Queues an OUTPUT_APPEND event scoped to a Helium application. */
  public void onAppOutputAppend(
      String noteId, String paragraphId, int index, String appId, String output) {
    Map<String, Object> appendOutput = new HashMap<>();
    appendOutput.put("noteId", noteId);
    appendOutput.put("paragraphId", paragraphId);
    appendOutput.put("index", Integer.toString(index));
    appendOutput.put("appId", appId);
    appendOutput.put("data", output);
    sendEvent(new RemoteInterpreterEvent(
        RemoteInterpreterEventType.OUTPUT_APPEND,
        gson.toJson(appendOutput)));
  }
  /** Queues an OUTPUT_UPDATE event scoped to a Helium application. */
  public void onAppOutputUpdate(
      String noteId, String paragraphId, int index, String appId,
      InterpreterResult.Type type, String output) {
    Map<String, Object> appendOutput = new HashMap<>();
    appendOutput.put("noteId", noteId);
    appendOutput.put("paragraphId", paragraphId);
    appendOutput.put("index", Integer.toString(index));
    appendOutput.put("appId", appId);
    appendOutput.put("type", type);
    appendOutput.put("data", output);
    logger.info("onAppoutputUpdate = {}", output);
    sendEvent(new RemoteInterpreterEvent(
        RemoteInterpreterEventType.OUTPUT_UPDATE,
        gson.toJson(appendOutput)));
  }
  /** Queues an APP_STATUS_UPDATE event for a Helium application. */
  public void onAppStatusUpdate(String noteId, String paragraphId, String appId, String status) {
    Map<String, String> appendOutput = new HashMap<>();
    appendOutput.put("noteId", noteId);
    appendOutput.put("paragraphId", paragraphId);
    appendOutput.put("appId", appId);
    appendOutput.put("status", status);
    sendEvent(new RemoteInterpreterEvent(
        RemoteInterpreterEventType.APP_STATUS_UPDATE,
        gson.toJson(appendOutput)));
  }
  /** Queues a META_INFOS event carrying interpreter metadata. */
  public void onMetaInfosReceived(Map<String, String> infos) {
    sendEvent(new RemoteInterpreterEvent(RemoteInterpreterEventType.META_INFOS,
        gson.toJson(infos)));
  }
  /**
   * Wait for eventQueue becomes empty, at most {@code atMost} milliseconds;
   * any events still queued after the deadline are discarded.
   */
  public void waitForEventQueueBecomesEmpty(long atMost) {
    long startTime = System.currentTimeMillis();
    synchronized (eventQueue) {
      while (!eventQueue.isEmpty() && (System.currentTimeMillis() - startTime) < atMost) {
        try {
          eventQueue.wait(100);
        } catch (InterruptedException e) {
          // ignore exception
        }
      }
      if (!eventQueue.isEmpty())
        eventQueue.clear();
    }
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterRunningProcess.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterRunningProcess.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.remote;
import org.apache.zeppelin.helium.ApplicationEventListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Wrapper around an interpreter server that is already running somewhere else:
 * this JVM only connects to it, and never starts or stops the process itself.
 */
public class RemoteInterpreterRunningProcess extends RemoteInterpreterProcess {
  private final Logger logger = LoggerFactory.getLogger(RemoteInterpreterRunningProcess.class);
  private final String host;
  private final int port;
  public RemoteInterpreterRunningProcess(
      int connectTimeout,
      RemoteInterpreterProcessListener listener,
      ApplicationEventListener appListener,
      String host,
      int port
  ) {
    super(connectTimeout, listener, appListener);
    this.host = host;
    this.port = port;
  }
  /** @return the host the external interpreter server listens on */
  @Override
  public String getHost() {
    return host;
  }
  /** @return the port the external interpreter server listens on */
  @Override
  public int getPort() {
    return port;
  }
  /** No-op: the process lifecycle is managed outside this JVM. */
  @Override
  public void start(String userName, Boolean isUserImpersonate) {
    // nothing to launch — the process already exists
  }
  /** No-op: the process lifecycle is managed outside this JVM. */
  @Override
  public void stop() {
    // nothing to tear down — the process outlives this wrapper
  }
  /** "Running" here simply means the endpoint answers on host:port. */
  @Override
  public boolean isRunning() {
    boolean reachable =
        RemoteInterpreterUtils.checkIfRemoteEndpointAccessible(getHost(), getPort());
    return reachable;
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/AppendOutputRunner.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/AppendOutputRunner.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.remote;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * This thread sends paragraph's append-data periodically, rather than
 * continuously, with a period of BUFFER_TIME_MS. It handles append-data
 * for all paragraphs across all notebooks.
 */
public class AppendOutputRunner implements Runnable {
  private static final Logger logger =
      LoggerFactory.getLogger(AppendOutputRunner.class);
  /** Flush period in milliseconds (kept as Long: part of the public interface). */
  public static final Long BUFFER_TIME_MS = Long.valueOf(100);
  // Warn thresholds; exceeding them suggests the buffer period is too long.
  private static final long SAFE_PROCESSING_TIME = 10L;
  private static final long SAFE_PROCESSING_STRING_SIZE = 100000L;
  private final BlockingQueue<AppendOutputBuffer> queue = new LinkedBlockingQueue<>();
  private final RemoteInterpreterProcessListener listener;
  public AppendOutputRunner(RemoteInterpreterProcessListener listener) {
    this.listener = listener;
  }
  @Override
  public void run() {
    Map<String, StringBuilder> stringBufferMap = new HashMap<>();
    List<AppendOutputBuffer> list = new LinkedList<>();
    /* "drainTo" method does not wait for any element
     * to be present in the queue, and thus this loop would
     * continuosly run (with period of BUFFER_TIME_MS). "take()" method
     * waits for the queue to become non-empty and then removes
     * one element from it. Rest elements from queue (if present) are
     * removed using "drainTo" method. Thus we save on some un-necessary
     * cpu-cycles.
     */
    try {
      list.add(queue.take());
    } catch (InterruptedException e) {
      logger.error("Wait for OutputBuffer queue interrupted: " + e.getMessage());
      // Restore the interrupt status so the scheduling executor can observe it.
      Thread.currentThread().interrupt();
    }
    long processingStartTime = System.currentTimeMillis();
    queue.drainTo(list);
    // Coalesce all drained chunks per (note, paragraph, output-index) key.
    for (AppendOutputBuffer buffer : list) {
      String noteId = buffer.getNoteId();
      String paragraphId = buffer.getParagraphId();
      int index = buffer.getIndex();
      String stringBufferKey = noteId + ":" + paragraphId + ":" + index;
      StringBuilder builder = stringBufferMap.get(stringBufferKey);
      if (builder == null) {
        builder = new StringBuilder();
        stringBufferMap.put(stringBufferKey, builder);
      }
      builder.append(buffer.getData());
    }
    long processingTime = System.currentTimeMillis() - processingStartTime;
    if (processingTime > SAFE_PROCESSING_TIME) {
      logger.warn("Processing time for buffered append-output is high: {} milliseconds.",
          processingTime);
    } else {
      logger.debug("Processing time for append-output took {} milliseconds", processingTime);
    }
    // Primitive accumulator avoids repeated Long boxing in the loop below.
    long sizeProcessed = 0L;
    for (Map.Entry<String, StringBuilder> entry : stringBufferMap.entrySet()) {
      StringBuilder buffer = entry.getValue();
      sizeProcessed += buffer.length();
      // Key format is noteId:paragraphId:index; assumes ids never contain ':' — TODO confirm.
      String[] keys = entry.getKey().split(":");
      listener.onOutputAppend(keys[0], keys[1], Integer.parseInt(keys[2]), buffer.toString());
    }
    if (sizeProcessed > SAFE_PROCESSING_STRING_SIZE) {
      logger.warn("Processing size for buffered append-output is high: {} characters.",
          sizeProcessed);
    } else {
      logger.debug("Processing size for append-output is {} characters", sizeProcessed);
    }
  }
  /** Enqueues one chunk of append output; never blocks (unbounded queue). */
  public void appendBuffer(String noteId, String paragraphId, int index, String outputToAppend) {
    queue.offer(new AppendOutputBuffer(noteId, paragraphId, index, outputToAppend));
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteAngularObject.java | smart-zeppelin/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteAngularObject.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.remote;
import org.apache.zeppelin.display.AngularObject;
import org.apache.zeppelin.display.AngularObjectListener;
import org.apache.zeppelin.interpreter.InterpreterGroup;
/**
 * Proxy for an AngularObject that lives in a remote interpreter process:
 * local updates are mirrored to the remote side unless suppressed.
 */
public class RemoteAngularObject extends AngularObject {
  private transient InterpreterGroup interpreterGroup;
  RemoteAngularObject(String name, Object o, String noteId, String paragraphId,
                      InterpreterGroup interpreterGroup,
                      AngularObjectListener listener) {
    super(name, o, noteId, paragraphId, listener);
    this.interpreterGroup = interpreterGroup;
  }
  @Override
  public void set(Object o, boolean emit) {
    set(o, emit, true);
  }
  /**
   * Sets the value locally and, unless emitRemoteProcess is false, pushes the
   * new value to the remote interpreter process as well.
   */
  public void set(Object o, boolean emitWeb, boolean emitRemoteProcess) {
    super.set(o, emitWeb);
    if (!emitRemoteProcess) {
      return;
    }
    // send updated value to remote interpreter
    RemoteInterpreterProcess process = interpreterGroup.getRemoteInterpreterProcess();
    process.updateRemoteAngularObject(getName(), getNoteId(), getParagraphId(), o);
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-metastore/src/test/java/org/smartdata/metastore/TestMetaStore.java | smart-metastore/src/test/java/org/smartdata/metastore/TestMetaStore.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.smartdata.metastore;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.smartdata.metrics.FileAccessEvent;
import org.smartdata.model.ActionInfo;
import org.smartdata.model.BackUpInfo;
import org.smartdata.model.CachedFileStatus;
import org.smartdata.model.ClusterConfig;
import org.smartdata.model.ClusterInfo;
import org.smartdata.model.CmdletInfo;
import org.smartdata.model.CmdletState;
import org.smartdata.model.CompressionFileState;
import org.smartdata.model.DataNodeInfo;
import org.smartdata.model.DataNodeStorageInfo;
import org.smartdata.model.FileInfo;
import org.smartdata.model.FileState;
import org.smartdata.model.GlobalConfig;
import org.smartdata.model.NormalFileState;
import org.smartdata.model.RuleInfo;
import org.smartdata.model.RuleState;
import org.smartdata.model.StorageCapacity;
import org.smartdata.model.StoragePolicy;
import org.smartdata.model.SystemInfo;
import org.smartdata.model.XAttribute;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Random;
public class TestMetaStore extends TestDaoUtil {
private MetaStore metaStore;
  @Before
  public void metaInit() throws Exception {
    // Bring up the test DAO layer first, then build the MetaStore under test on top of it.
    initDao();
    metaStore = new MetaStore(druidPool);
  }
@After
public void metaClose() throws Exception {
closeDao();
if (metaStore != null) {
metaStore = null;
}
}
  @Test
  public void testHighConcurrency() throws Exception {
    // Multiple threads
    // The writer gets a 1s head start so the reader thread finds rows to update.
    // NOTE(review): th1 is never joined, so inserts may still be in flight when
    // the test method returns — confirm this is intentional.
    Thread th1 = new InsertThread(metaStore);
    Thread th2 = new SelectUpdateThread(metaStore);
    th1.start();
    Thread.sleep(1000);
    th2.start();
    th2.join();
  }
  @Test
  public void testThreadSleepConcurrency() throws Exception {
    // Multiple threads
    // Same race as testHighConcurrency, but the reader sleeps mid-update to
    // widen the read-modify-write window.
    // NOTE(review): th1 is never joined — confirm this is intentional.
    Thread th1 = new InsertThread(metaStore);
    Thread th2 = new SleepSelectUpdateThread(metaStore);
    th1.start();
    Thread.sleep(1000);
    th2.start();
    th2.join();
  }
  class SleepSelectUpdateThread extends Thread {
    private MetaStore metaStore;
    public SleepSelectUpdateThread(MetaStore metaStore) {
      this.metaStore = metaStore;
    }
    // Repeatedly reads action i, marks it finished, sleeps briefly to widen the
    // race window against InsertThread, then writes it back and re-lists.
    // NOTE(review): assertion failures inside this worker thread only kill the
    // thread — they do not fail the enclosing JUnit test. Consider collecting
    // errors and re-asserting on the main thread.
    public void run() {
      for (int i = 0; i < 100; i++) {
        try {
          List<ActionInfo> actionInfoList =
              metaStore.getActions(Arrays.asList((long) i));
          actionInfoList.get(0).setFinished(true);
          actionInfoList.get(0).setFinishTime(System.currentTimeMillis());
          sleep(5);
          metaStore.updateActions(actionInfoList.toArray(new ActionInfo[actionInfoList.size()]));
          metaStore.getActions(null, null);
        } catch (MetaStoreException | InterruptedException e) {
          System.out.println(e.getMessage());
          Assert.assertTrue(false);
        }
      }
    }
  }
class InsertThread extends Thread {
private MetaStore metaStore;
public InsertThread(MetaStore metaStore) {
this.metaStore = metaStore;
}
public void run() {
Map<String, String> args = new HashMap();
ActionInfo actionInfo =
new ActionInfo(1, 1, "cache", args, "Test", "Test", true, 123213213L, true, 123123L, 100);
for (int i = 0; i < 100; i++) {
actionInfo.setActionId(i);
try {
metaStore.insertAction(actionInfo);
} catch (MetaStoreException e) {
System.out.println(e.getMessage());
Assert.assertTrue(false);
}
}
}
}
class SelectUpdateThread extends Thread {
private MetaStore metaStore;
public SelectUpdateThread(MetaStore metaStore) {
this.metaStore = metaStore;
}
public void run() {
for (int i = 0; i < 100; i++) {
try {
List<ActionInfo> actionInfoList =
metaStore.getActions(Arrays.asList((long) i));
actionInfoList.get(0).setFinished(true);
actionInfoList.get(0).setFinishTime(System.currentTimeMillis());
metaStore.updateActions(actionInfoList.toArray(new ActionInfo[actionInfoList.size()]));
metaStore.getActions(null, null);
} catch (MetaStoreException e) {
System.out.println(e.getMessage());
Assert.assertTrue(false);
}
}
}
}
@Test
public void testGetFiles() throws Exception {
String pathString = "/tmp/des";
long length = 123L;
boolean isDir = false;
int blockReplication = 1;
long blockSize = 128 * 1024L;
long modTime = 123123123L;
long accessTime = 123123120L;
String owner = "root";
String group = "admin";
long fileId = 56L;
byte storagePolicy = 0;
byte erasureCodingPolicy = 0;
FileInfo fileInfo =
new FileInfo(
pathString,
fileId,
length,
isDir,
(short) blockReplication,
blockSize,
modTime,
accessTime,
(short) 1,
owner,
group,
storagePolicy,
erasureCodingPolicy);
metaStore.insertFile(fileInfo);
FileInfo dbFileInfo = metaStore.getFile(56);
Assert.assertTrue(dbFileInfo.equals(fileInfo));
dbFileInfo = metaStore.getFile("/tmp/des");
Assert.assertTrue(dbFileInfo.equals(fileInfo));
}
@Test
public void testGetNonExistFile() throws Exception {
FileInfo info = metaStore.getFile("/non_exist_file_path");
Assert.assertTrue(info == null);
}
@Test
public void testInsertStoragesTable() throws Exception {
StorageCapacity storage1 = new StorageCapacity("Flash", 12343333L, 2223333L);
StorageCapacity storage2 = new StorageCapacity("RAM", 12342233L, 2223663L);
StorageCapacity[] storages = {storage1, storage2};
metaStore.insertUpdateStoragesTable(storages);
StorageCapacity storageCapacity1 = metaStore.getStorageCapacity("Flash");
StorageCapacity storageCapacity2 = metaStore.getStorageCapacity("RAM");
Assert.assertTrue(storageCapacity1.equals(storage1));
Assert.assertTrue(storageCapacity2.equals(storage2));
Assert.assertTrue(metaStore.updateStoragesTable("Flash", 123456L, 4562233L));
Assert.assertTrue(metaStore.getStorageCapacity("Flash").getCapacity() == 123456L);
}
@Test
public void testGetStoreCapacityOfDifferentStorageType() throws Exception {
DataNodeStorageInfo info1 = new DataNodeStorageInfo("1", "ssd", 1, "1", 1, 1, 1, 1, 1);
DataNodeStorageInfo info2 = new DataNodeStorageInfo("2", "ssd", 2, "2", 2, 2, 2, 2, 2);
metaStore.insertDataNodeStorageInfo(info1);
metaStore.insertDataNodeStorageInfo(info2);
long capacity = metaStore.getStoreCapacityOfDifferentStorageType("ssd");
Assert.assertTrue(capacity == 3);
}
@Test
public void testGetStoreFreeOfDifferentStorageType() throws Exception {
DataNodeStorageInfo info1 = new DataNodeStorageInfo("1", "ssd", 1, "1", 1, 1, 1, 1, 1);
DataNodeStorageInfo info2 = new DataNodeStorageInfo("2", "ssd", 2, "2", 2, 2, 2, 2, 2);
metaStore.insertDataNodeStorageInfo(info1);
metaStore.insertDataNodeStorageInfo(info2);
long free = metaStore.getStoreFreeOfDifferentStorageType("ssd");
Assert.assertTrue(free == 3);
}
@Test
public void testGetStorageCapacity() throws Exception {
StorageCapacity storage1 = new StorageCapacity("HDD", 12343333L, 2223333L);
StorageCapacity storage2 = new StorageCapacity("RAM", 12342233L, 2223663L);
StorageCapacity[] storages = {storage1, storage2};
metaStore.insertUpdateStoragesTable(storages);
Assert.assertTrue(metaStore.getStorageCapacity("HDD").equals(storage1));
Assert.assertTrue(metaStore.getStorageCapacity("RAM").equals(storage2));
StorageCapacity storage3 = new StorageCapacity("HDD", 100L, 10L);
metaStore.insertUpdateStoragesTable(storage3);
Assert.assertTrue(metaStore.getStorageCapacity("HDD").equals(storage3));
}
@Test
public void testInsertRule() throws Exception {
String rule = "file : accessCount(10m) > 20 \n\n" + "and length() > 3 | cache";
long submitTime = System.currentTimeMillis();
RuleInfo info1 = new RuleInfo(0, submitTime, rule, RuleState.ACTIVE, 0, 0, 0);
Assert.assertTrue(metaStore.insertNewRule(info1));
RuleInfo info11 = metaStore.getRuleInfo(info1.getId());
Assert.assertTrue(info1.equals(info11));
long now = System.currentTimeMillis();
metaStore.updateRuleInfo(info1.getId(), RuleState.DELETED, now, 1, 1);
info1.setState(RuleState.DELETED);
info1.setLastCheckTime(now);
info1.setNumChecked(1);
info1.setNumCmdsGen(1);
RuleInfo info12 = metaStore.getRuleInfo(info1.getId());
Assert.assertTrue(info12.equals(info1));
RuleInfo info2 = new RuleInfo(0, submitTime, rule, RuleState.ACTIVE, 0, 0, 0);
Assert.assertTrue(metaStore.insertNewRule(info2));
RuleInfo info21 = metaStore.getRuleInfo(info2.getId());
Assert.assertFalse(info11.equals(info21));
List<RuleInfo> infos = metaStore.getRuleInfo();
Assert.assertTrue(infos.size() == 2);
}
  @Test
  public void testMoveSyncRules() throws Exception {
    // Sets up one file under /src/, one sync rule with a matching backup
    // entry, three mover rules and one cache rule, then verifies that
    // listMoveRules/listSyncRules classify them correctly and that
    // listFileActions can be called for the sync rule.
    String pathString = "/src/1";
    long length = 123L;
    boolean isDir = false;
    int blockReplication = 1;
    long blockSize = 128 * 1024L;
    long modTime = 123123123L;
    long accessTime = 123123120L;
    String owner = "root";
    String group = "admin";
    long fileId = 56L;
    byte storagePolicy = 0;
    byte erasureCodingPolicy = 0;
    FileInfo fileInfo =
        new FileInfo(
            pathString,
            fileId,
            length,
            isDir,
            (short) blockReplication,
            blockSize,
            modTime,
            accessTime,
            (short) 1,
            owner,
            group,
            storagePolicy,
            erasureCodingPolicy);
    metaStore.insertFile(fileInfo);
    Map<String, String> args = new HashMap();
    args.put("-file", "/src/1");
    String rule = "file : accessCount(10m) > 20 \n\n" + "and length() > 3 | ";
    long submitTime = System.currentTimeMillis();
    // The sync rule; its id is generated on insert and reused for the backup
    // entry and the cmdlet below.
    RuleInfo ruleInfo =
        new RuleInfo(0, submitTime, rule + "sync -dest /dest/", RuleState.ACTIVE, 0, 0, 0);
    metaStore.insertNewRule(ruleInfo);
    metaStore.insertBackUpInfo(new BackUpInfo(ruleInfo.getId(), "/src/", "/dest/", 100));
    // Three mover rules (allssd/archive/onessd) and one cache rule.
    metaStore.insertNewRule(
        new RuleInfo(1, submitTime, rule + "allssd", RuleState.ACTIVE, 0, 0, 0));
    metaStore.insertNewRule(
        new RuleInfo(2, submitTime, rule + "archive", RuleState.ACTIVE, 0, 0, 0));
    metaStore.insertNewRule(
        new RuleInfo(2, submitTime, rule + "onessd", RuleState.ACTIVE, 0, 0, 0));
    metaStore.insertNewRule(new RuleInfo(2, submitTime, rule + "cache", RuleState.ACTIVE, 0, 0, 0));
    Assert.assertTrue(metaStore.listMoveRules().size() == 3);
    Assert.assertTrue(metaStore.listSyncRules().size() == 1);
    CmdletInfo cmdletInfo =
        new CmdletInfo(1, ruleInfo.getId(), CmdletState.EXECUTING, "test", 123123333L, 232444444L);
    cmdletInfo.setAids(Collections.singletonList(1L));
    metaStore.insertCmdlet(cmdletInfo);
    metaStore.insertAction(
        new ActionInfo(1, 1, "allssd", args, "Test", "Test", true, 123213213L, true, 123123L, 100));
    // NOTE(review): `size() >= 0` is always true — this only checks that the
    // call does not throw, not the actual result.
    Assert.assertTrue(metaStore.listFileActions(ruleInfo.getId(), 0).size() >= 0);
  }
@Test
public void testUpdateCachedFiles() throws Exception {
metaStore.insertCachedFiles(80L, "testPath", 1000L, 2000L, 100);
metaStore.insertCachedFiles(90L, "testPath2", 2000L, 3000L, 200);
Map<String, Long> pathToId = new HashMap<>();
pathToId.put("testPath", 80L);
pathToId.put("testPath2", 90L);
pathToId.put("testPath3", 100L);
List<FileAccessEvent> events = new ArrayList<>();
events.add(new FileAccessEvent("testPath", 3000L));
events.add(new FileAccessEvent("testPath", 4000L));
events.add(new FileAccessEvent("testPath2", 4000L));
events.add(new FileAccessEvent("testPath2", 5000L));
events.add(new FileAccessEvent("testPath3", 8000L));
events.add(new FileAccessEvent("testPath3", 9000L));
metaStore.updateCachedFiles(pathToId, events);
List<CachedFileStatus> statuses = metaStore.getCachedFileStatus();
Assert.assertTrue(statuses.size() == 2);
Map<Long, CachedFileStatus> statusMap = new HashMap<>();
for (CachedFileStatus status : statuses) {
statusMap.put(status.getFid(), status);
}
Assert.assertTrue(statusMap.containsKey(80L));
CachedFileStatus first = statusMap.get(80L);
Assert.assertTrue(first.getLastAccessTime() == 4000L);
Assert.assertTrue(first.getNumAccessed() == 102);
Assert.assertTrue(statusMap.containsKey(90L));
CachedFileStatus second = statusMap.get(90L);
Assert.assertTrue(second.getLastAccessTime() == 5000L);
Assert.assertTrue(second.getNumAccessed() == 202);
}
@Test
public void testInsertDeleteCachedFiles() throws Exception {
metaStore.insertCachedFiles(80L, "testPath", 123456L, 234567L, 456);
Assert.assertTrue(metaStore.getCachedFileStatus(80L).getFromTime() == 123456L);
// Update record with 80l id
Assert.assertTrue(metaStore.updateCachedFiles(80L, 234568L, 460));
Assert.assertTrue(metaStore.getCachedFileStatus().get(0).getLastAccessTime() == 234568L);
List<CachedFileStatus> list = new LinkedList<>();
list.add(new CachedFileStatus(321L, "testPath", 113334L, 222222L, 222));
metaStore.insertCachedFiles(list);
Assert.assertTrue(metaStore.getCachedFileStatus(321L).getNumAccessed() == 222);
Assert.assertTrue(metaStore.getCachedFileStatus().size() == 2);
// Delete one record
metaStore.deleteCachedFile(321L);
Assert.assertTrue(metaStore.getCachedFileStatus().size() == 1);
// Clear all records
metaStore.deleteAllCachedFile();
Assert.assertTrue(metaStore.getCachedFileStatus().size() == 0);
metaStore.insertCachedFiles(80L, "testPath", 123456L, 234567L, 456);
}
@Test
public void testGetCachedFileStatus() throws Exception {
metaStore.insertCachedFiles(6L, "testPath", 1490918400000L, 234567L, 456);
metaStore.insertCachedFiles(19L, "testPath", 1490918400000L, 234567L, 456);
metaStore.insertCachedFiles(23L, "testPath", 1490918400000L, 234567L, 456);
CachedFileStatus cachedFileStatus = metaStore.getCachedFileStatus(6);
Assert.assertTrue(cachedFileStatus.getFromTime() == 1490918400000L);
List<CachedFileStatus> cachedFileList = metaStore.getCachedFileStatus();
List<Long> fids = metaStore.getCachedFids();
Assert.assertTrue(fids.size() == 3);
Assert.assertTrue(cachedFileList.get(0).getFid() == 6);
Assert.assertTrue(cachedFileList.get(1).getFid() == 19);
Assert.assertTrue(cachedFileList.get(2).getFid() == 23);
}
@Test
public void testInsetFiles() throws Exception {
String pathString = "/tmp/testFile";
long length = 123L;
boolean isDir = false;
int blockReplication = 1;
long blockSize = 128 * 1024L;
long modTime = 123123123L;
long accessTime = 123123120L;
String owner = "root";
String group = "admin";
long fileId = 312321L;
byte storagePolicy = 0;
byte erasureCodingPolicy = 0;
FileInfo[] files = {
new FileInfo(
pathString,
fileId,
length,
isDir,
(short) blockReplication,
blockSize,
modTime,
accessTime,
(short) 1,
owner,
group,
storagePolicy,
erasureCodingPolicy)
};
metaStore.insertFiles(files);
FileInfo dbFileInfo = metaStore.getFile("/tmp/testFile");
Assert.assertTrue(dbFileInfo.equals(files[0]));
}
@Test
public void testInsertCmdletsTable() throws Exception {
CmdletInfo command1 =
new CmdletInfo(0, 1, CmdletState.EXECUTING, "test", 123123333L, 232444444L);
metaStore.insertCmdlet(command1);
CmdletInfo command2 = new CmdletInfo(1, 78, CmdletState.PAUSED, "tt", 123178333L, 232444994L);
metaStore.insertCmdlet(command2);
Assert.assertTrue(metaStore.getCmdletById(command1.getCid()).equals(command1));
Assert.assertTrue(metaStore.getCmdletById(command2.getCid()).equals(command2));
metaStore.updateCmdlet(command1.getCid(), "TestParameter", CmdletState.DRYRUN);
Assert.assertTrue(
metaStore.getCmdletById(command1.getCid()).getParameters().equals("TestParameter"));
Assert.assertTrue(
metaStore.getCmdletById(command1.getCid()).getState().equals(CmdletState.DRYRUN));
}
@Test
public void testdeleteFinishedCmdletsWithGenTimeBefore() throws Exception {
Map<String, String> args = new HashMap();
CmdletInfo command1 =
new CmdletInfo(0, 78, CmdletState.CANCELLED, "test", 123L, 232444444L);
metaStore.insertCmdlet(command1);
CmdletInfo command2 = new CmdletInfo(1, 78, CmdletState.DONE, "tt", 128L, 232444994L);
metaStore.insertCmdlet(command2);
ActionInfo actionInfo =
new ActionInfo(1, 0, "cache", args, "Test", "Test", true, 123213213L, true, 123123L, 100);
metaStore.insertAction(actionInfo);
ActionInfo actionInfo2 =
new ActionInfo(2, 1, "cache", args, "Test", "Test", true, 123213213L, true, 123123L, 100);
metaStore.insertAction(actionInfo2);
ActionInfo actionInfo3 =
new ActionInfo(3, 0, "cache", args, "Test", "Test", true, 123213213L, true, 123123L, 100);
metaStore.insertAction(actionInfo3);
metaStore.deleteFinishedCmdletsWithGenTimeBefore(125);
Assert.assertTrue(metaStore.getCmdletById(0) == null);
Assert.assertTrue(metaStore.getActionById(1) == null);
Assert.assertTrue(metaStore.getActionById(2) != null);
}
@Test
public void testdeleteKeepNewCmdlets() throws Exception {
Map<String, String> args = new HashMap();
CmdletInfo command1 =
new CmdletInfo(0, 78, CmdletState.CANCELLED, "test", 123L, 232444444L);
metaStore.insertCmdlet(command1);
CmdletInfo command2 = new CmdletInfo(1, 78, CmdletState.DONE, "tt", 128L, 232444994L);
metaStore.insertCmdlet(command2);
ActionInfo actionInfo =
new ActionInfo(1, 0, "cache", args, "Test", "Test", true, 123213213L, true, 123123L, 100);
metaStore.insertAction(actionInfo);
ActionInfo actionInfo2 =
new ActionInfo(2, 1, "cache", args, "Test", "Test", true, 123213213L, true, 123123L, 100);
metaStore.insertAction(actionInfo2);
ActionInfo actionInfo3 =
new ActionInfo(3, 0, "cache", args, "Test", "Test", true, 123213213L, true, 123123L, 100);
metaStore.insertAction(actionInfo3);
metaStore.deleteKeepNewCmdlets(1);
Assert.assertTrue(metaStore.getCmdletById(0) == null);
Assert.assertTrue(metaStore.getActionById(1) == null);
Assert.assertTrue(metaStore.getActionById(2) != null);
}
@Test
public void testUpdateDeleteCommand() throws Exception {
long commandId = 0;
commandId = metaStore.getMaxCmdletId();
System.out.printf("CommandID = %d\n", commandId);
CmdletInfo command1 = new CmdletInfo(0, 1, CmdletState.PENDING, "test", 123123333L, 232444444L);
CmdletInfo command2 = new CmdletInfo(1, 78, CmdletState.PENDING, "tt", 123178333L, 232444994L);
CmdletInfo[] commands = {command1, command2};
metaStore.insertCmdlets(commands);
String cidCondition = ">= 1 ";
String ridCondition = "= 78 ";
List<CmdletInfo> com = metaStore.getCmdlets(cidCondition, ridCondition, CmdletState.PENDING);
commandId = metaStore.getMaxCmdletId();
Assert.assertTrue(commandId == commands.length);
for (CmdletInfo cmd : com) {
// System.out.printf("Cid = %d \n", cmd.getCid());
metaStore.updateCmdlet(cmd.getCid(), CmdletState.DONE);
}
List<CmdletInfo> com1 = metaStore.getCmdlets(cidCondition, ridCondition, CmdletState.DONE);
Assert.assertTrue(com1.size() == 1);
Assert.assertTrue(com1.get(0).getState().equals(CmdletState.DONE));
metaStore.deleteCmdlet(command2.getCid());
com1 = metaStore.getCmdlets(cidCondition, ridCondition, CmdletState.DONE);
Assert.assertTrue(com1.size() == 0);
}
@Test
public void testInsertListActions() throws Exception {
Map<String, String> args = new HashMap();
ActionInfo actionInfo =
new ActionInfo(1, 1, "cache", args, "Test", "Test", true, 123213213L, true, 123123L, 100);
metaStore.insertActions(new ActionInfo[] {actionInfo});
List<ActionInfo> actionInfos = metaStore.getActions(null, null);
Assert.assertTrue(actionInfos.size() == 1);
actionInfo.setResult("Finished");
metaStore.updateActions(new ActionInfo[] {actionInfo});
actionInfos = metaStore.getActions(null, null);
Assert.assertTrue(actionInfos.get(0).equals(actionInfo));
}
@Test
public void testGetNewCreatedActions() throws Exception {
Map<String, String> args = new HashMap();
List<ActionInfo> actionInfos;
ActionInfo actionInfo =
new ActionInfo(1, 1, "cache", args, "Test", "Test", true, 123213213L, true, 123123L, 100);
metaStore.insertAction(actionInfo);
actionInfo.setActionId(2);
metaStore.insertAction(actionInfo);
actionInfos = metaStore.getNewCreatedActions(1);
Assert.assertTrue(actionInfos.size() == 1);
actionInfos = metaStore.getNewCreatedActions("cache", 1, true, true);
Assert.assertTrue(actionInfos.size() == 1);
actionInfos = metaStore.getNewCreatedActions(2);
Assert.assertTrue(actionInfos.size() == 2);
}
@Test
public void testGetMaxActionId() throws Exception {
long currentId = metaStore.getMaxActionId();
Map<String, String> args = new HashMap();
Assert.assertTrue(currentId == 0);
ActionInfo actionInfo =
new ActionInfo(
currentId, 1, "cache", args, "Test", "Test", true, 123213213L, true, 123123L, 100);
metaStore.insertActions(new ActionInfo[] {actionInfo});
currentId = metaStore.getMaxActionId();
Assert.assertTrue(currentId == 1);
actionInfo =
new ActionInfo(
currentId, 1, "cache", args, "Test", "Test", true, 123213213L, true, 123123L, 100);
metaStore.insertActions(new ActionInfo[] {actionInfo});
currentId = metaStore.getMaxActionId();
Assert.assertTrue(currentId == 2);
}
@Test
public void testInsertStoragePolicyTable() throws Exception {
metaStore.insertStoragePolicy(new StoragePolicy((byte) 53, "COOL"));
metaStore.insertStoragePolicy(new StoragePolicy((byte) 52, "COLD"));
String value = metaStore.getStoragePolicyName(53);
Assert.assertEquals(metaStore.getStoragePolicyName(52), "COLD");
int key = metaStore.getStoragePolicyID("COOL");
Assert.assertEquals(value, "COOL");
Assert.assertEquals(key, 53);
}
@Test
public void testInsertXattrTable() throws Exception {
long fid = 567L;
List<XAttribute> attributes = new ArrayList<>();
Random random = new Random();
byte[] value1 = new byte[1024];
byte[] value2 = new byte[1024];
random.nextBytes(value1);
random.nextBytes(value2);
attributes.add(new XAttribute("user", "a1", value1));
attributes.add(new XAttribute("raw", "you", value2));
Assert.assertTrue(metaStore.insertXattrList(fid, attributes));
List<XAttribute> result = metaStore.getXattrList(fid);
Assert.assertTrue(result.size() == attributes.size());
Assert.assertTrue(result.containsAll(attributes));
}
@Test
public void testSetClusterConfig() throws MetaStoreException {
ClusterConfig clusterConfig = new ClusterConfig(1, "test", "test1");
metaStore.setClusterConfig(clusterConfig);
List<ClusterConfig> list = new LinkedList<>();
list.add(clusterConfig);
Assert.assertTrue(metaStore.listClusterConfig().equals(list));
list.get(0).setConfig_path("test2");
metaStore.setClusterConfig(list.get(0));
Assert.assertTrue(metaStore.listClusterConfig().equals(list));
}
@Test
public void testDelClusterConfig() throws MetaStoreException {
ClusterConfig clusterConfig = new ClusterConfig(1, "test", "test1");
metaStore.setClusterConfig(clusterConfig);
metaStore.delClusterConfig(clusterConfig);
Assert.assertTrue(metaStore.listClusterConfig().size() == 0);
}
@Test
public void testSetGlobalConfig() throws MetaStoreException {
GlobalConfig globalConfig = new GlobalConfig(1, "test", "test1");
metaStore.setGlobalConfig(globalConfig);
Assert.assertTrue(metaStore.getDefaultGlobalConfigByName("test").equals(globalConfig));
globalConfig.setPropertyValue("test2");
metaStore.setGlobalConfig(globalConfig);
Assert.assertTrue(metaStore.getDefaultGlobalConfigByName("test").equals(globalConfig));
}
@Test
public void testInsertDataNodeInfo() throws Exception {
DataNodeInfo insertInfo1 = new DataNodeInfo("UUID1", "hostname", "www.ssm.com", 100, 50, "lab");
metaStore.insertDataNodeInfo(insertInfo1);
List<DataNodeInfo> getInfo1 = metaStore.getDataNodeInfoByUuid("UUID1");
Assert.assertTrue(insertInfo1.equals(getInfo1.get(0)));
DataNodeInfo insertInfo2 = new DataNodeInfo("UUID2", "HOSTNAME", "www.ssm.com", 0, 0, null);
DataNodeInfo insertInfo3 = new DataNodeInfo("UUID3", "HOSTNAME", "www.ssm.com", 0, 0, null);
metaStore.insertDataNodeInfos(new DataNodeInfo[] {insertInfo2, insertInfo3});
List<DataNodeInfo> getInfo2 = metaStore.getDataNodeInfoByUuid("UUID2");
Assert.assertTrue(insertInfo2.equals(getInfo2.get(0)));
List<DataNodeInfo> getInfo3 = metaStore.getDataNodeInfoByUuid("UUID3");
Assert.assertTrue(insertInfo3.equals(getInfo3.get(0)));
}
@Test
public void testDeleteDataNodeInfo() throws Exception {
DataNodeInfo insertInfo1 = new DataNodeInfo("UUID1", "hostname", "www.ssm.com", 100, 50, "lab");
DataNodeInfo insertInfo2 = new DataNodeInfo("UUID2", "HOSTNAME", "www.ssm.com", 0, 0, null);
DataNodeInfo insertInfo3 = new DataNodeInfo("UUID3", "HOSTNAME", "www.ssm.com", 0, 0, null);
metaStore.insertDataNodeInfos(new DataNodeInfo[] {insertInfo1, insertInfo2, insertInfo3});
List<DataNodeInfo> infos = metaStore.getAllDataNodeInfo();
Assert.assertTrue(infos.size() == 3);
metaStore.deleteDataNodeInfo(insertInfo1.getUuid());
infos = metaStore.getAllDataNodeInfo();
Assert.assertTrue(infos.size() == 2);
metaStore.deleteAllDataNodeInfo();
infos = metaStore.getAllDataNodeInfo();
Assert.assertTrue(infos.size() == 0);
}
@Test
public void testInsertDataNodeStorageInfo() throws Exception {
DataNodeStorageInfo insertInfo1 =
new DataNodeStorageInfo("UUID1", 10, 10, "storageid1", 0, 0, 0, 0, 0);
metaStore.insertDataNodeStorageInfo(insertInfo1);
List<DataNodeStorageInfo> getInfo1 = metaStore.getDataNodeStorageInfoByUuid("UUID1");
Assert.assertTrue(insertInfo1.equals(getInfo1.get(0)));
DataNodeStorageInfo insertInfo2 =
new DataNodeStorageInfo("UUID2", 10, 10, "storageid2", 0, 0, 0, 0, 0);
DataNodeStorageInfo insertInfo3 =
new DataNodeStorageInfo("UUID3", 10, 10, "storageid2", 0, 0, 0, 0, 0);
metaStore.insertDataNodeStorageInfos(new DataNodeStorageInfo[] {insertInfo2, insertInfo3});
List<DataNodeStorageInfo> getInfo2 = metaStore.getDataNodeStorageInfoByUuid("UUID2");
Assert.assertTrue(insertInfo2.equals(getInfo2.get(0)));
List<DataNodeStorageInfo> getInfo3 = metaStore.getDataNodeStorageInfoByUuid("UUID3");
Assert.assertTrue(insertInfo3.equals(getInfo3.get(0)));
}
@Test
public void testDeleteDataNodeStorageInfo() throws Exception {
DataNodeStorageInfo insertInfo1 =
new DataNodeStorageInfo("UUID1", 10, 10, "storageid1", 0, 0, 0, 0, 0);
DataNodeStorageInfo insertInfo2 =
new DataNodeStorageInfo("UUID2", 10, 10, "storageid2", 0, 0, 0, 0, 0);
DataNodeStorageInfo insertInfo3 =
new DataNodeStorageInfo("UUID3", 10, 10, "storageid3", 0, 0, 0, 0, 0);
metaStore.insertDataNodeStorageInfos(
new DataNodeStorageInfo[] {insertInfo1, insertInfo2, insertInfo3});
List<DataNodeStorageInfo> infos = metaStore.getAllDataNodeStorageInfo();
Assert.assertTrue(infos.size() == 3);
metaStore.deleteDataNodeStorageInfo(insertInfo1.getUuid());
infos = metaStore.getAllDataNodeStorageInfo();
Assert.assertTrue(infos.size() == 2);
metaStore.deleteAllDataNodeStorageInfo();
infos = metaStore.getAllDataNodeStorageInfo();
Assert.assertTrue(infos.size() == 0);
}
@Test
public void testInsertAndListAllBackUpInfo() throws MetaStoreException {
BackUpInfo backUpInfo1 = new BackUpInfo(1, "test1", "test1", 1);
BackUpInfo backUpInfo2 = new BackUpInfo(2, "test2", "test2", 2);
BackUpInfo backUpInfo3 = new BackUpInfo(3, "test3", "test3", 3);
metaStore.insertBackUpInfo(backUpInfo1);
metaStore.insertBackUpInfo(backUpInfo2);
metaStore.insertBackUpInfo(backUpInfo3);
List<BackUpInfo> backUpInfos = metaStore.listAllBackUpInfo();
Assert.assertTrue(backUpInfos.get(0).equals(backUpInfo1));
Assert.assertTrue(backUpInfos.get(1).equals(backUpInfo2));
Assert.assertTrue(backUpInfos.get(2).equals(backUpInfo3));
}
@Test
public void testGetBackUpInfoById() throws MetaStoreException {
BackUpInfo backUpInfo1 = new BackUpInfo(1, "test1", "test1", 1);
metaStore.insertBackUpInfo(backUpInfo1);
Assert.assertTrue(metaStore.getBackUpInfo(1).equals(backUpInfo1));
}
@Test
public void testDeleteBackUpInfo() throws MetaStoreException {
BackUpInfo backUpInfo1 = new BackUpInfo(1, "test1", "test1", 1);
metaStore.insertBackUpInfo(backUpInfo1);
Assert.assertTrue(metaStore.srcInbackup("test1/dfafdsaf"));
Assert.assertFalse(metaStore.srcInbackup("test2"));
metaStore.deleteBackUpInfo(1);
Assert.assertTrue(metaStore.listAllBackUpInfo().size() == 0);
metaStore.insertBackUpInfo(backUpInfo1);
metaStore.deleteAllBackUpInfo();
Assert.assertTrue(metaStore.listAllBackUpInfo().size() == 0);
}
@Test
public void testInsertAndListAllClusterInfo() throws MetaStoreException {
ClusterInfo clusterInfo1 = new ClusterInfo(1, "test1", "test1", "test1", "test1", "test1");
ClusterInfo clusterInfo2 = new ClusterInfo(2, "test2", "test2", "test2", "test2", "test2");
metaStore.insertClusterInfo(clusterInfo1);
metaStore.insertClusterInfo(clusterInfo2);
List<ClusterInfo> clusterInfos = metaStore.listAllClusterInfo();
Assert.assertTrue(clusterInfos.get(0).equals(clusterInfo1));
Assert.assertTrue(clusterInfos.get(1).equals(clusterInfo2));
}
@Test
public void testGetClusterInfoById() throws MetaStoreException {
ClusterInfo clusterInfo = new ClusterInfo(1, "test1", "test1", "test1", "test1", "test1");
metaStore.insertClusterInfo(clusterInfo);
Assert.assertTrue(metaStore.getClusterInfoByCid(1).equals(clusterInfo));
}
@Test
public void testDelectBackUpInfo() throws MetaStoreException {
ClusterInfo clusterInfo = new ClusterInfo(1, "test1", "test1", "test1", "test1", "test1");
metaStore.insertClusterInfo(clusterInfo);
metaStore.deleteClusterInfo(1);
Assert.assertTrue(metaStore.listAllClusterInfo().size() == 0);
metaStore.insertClusterInfo(clusterInfo);
metaStore.deleteAllClusterInfo();
Assert.assertTrue(metaStore.listAllClusterInfo().size() == 0);
}
@Test
public void testInsertSystemInfo() throws MetaStoreException {
SystemInfo systemInfo = new SystemInfo("test", "test");
metaStore.insertSystemInfo(systemInfo);
Assert.assertTrue(metaStore.getSystemInfoByProperty("test").equals(systemInfo));
}
@Test
public void testDeleteSystemInfo() throws MetaStoreException {
SystemInfo systemInfo = new SystemInfo("test", "test");
metaStore.insertSystemInfo(systemInfo);
metaStore.deleteSystemInfo("test");
Assert.assertTrue(metaStore.listAllSystemInfo().size() == 0);
}
@Test
public void testUpdateSystemInfo() throws MetaStoreException {
SystemInfo systemInfo = new SystemInfo("test", "test");
metaStore.insertSystemInfo(systemInfo);
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | true |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-metastore/src/test/java/org/smartdata/metastore/DBTest.java | smart-metastore/src/test/java/org/smartdata/metastore/DBTest.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.smartdata.metastore;
import org.dbunit.IDatabaseTester;
import org.dbunit.JdbcDatabaseTester;
import org.junit.After;
import org.junit.Before;
/**
 * Base class for DbUnit-backed metastore tests. Wires a {@code
 * JdbcDatabaseTester} to the SQLite database prepared by {@code TestDaoUtil}
 * and tears it down after each test.
 */
public abstract class DBTest extends TestDaoUtil {
  protected IDatabaseTester databaseTester;

  @Before
  public void setUp() throws Exception {
    initDao();
    databaseTester = new JdbcDatabaseTester("org.sqlite.JDBC", url);
    // Let DbUnit see both plain tables and views.
    String[] tableTypes = {"TABLE", "VIEW"};
    databaseTester.getConnection().getConfig().setProperty(
        "http://www.dbunit.org/properties/tableType", tableTypes);
  }

  @After
  public void close() throws Exception {
    databaseTester.onTearDown();
    closeDao();
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-metastore/src/test/java/org/smartdata/metastore/TestSqliteDB.java | smart-metastore/src/test/java/org/smartdata/metastore/TestSqliteDB.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.smartdata.metastore;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.smartdata.metastore.utils.MetaStoreUtils;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
/** Test operations with sqlite database. */
public class TestSqliteDB extends TestDaoUtil {
  // MetaStore under test, backed by the connection pool from TestDaoUtil.
  private MetaStore metaStore;
  @Before
  public void initDB() throws Exception {
    initDao();
    metaStore = new MetaStore(druidPool);
  }
  @After
  public void closeDB() throws Exception {
    metaStore = null;
    closeDao();
  }
  @Test
  public void testInitDB() throws Exception {
    // Smoke test: schema initialization must succeed on a fresh database.
    MetaStoreUtils.initializeDataBase(metaStore.getConnection());
  }
  @Test
  public void testDropTables() throws Exception {
    // Creates 10 throwaway tables and verifies dropAllTables removes exactly
    // them, using the catalog query appropriate for the backing database.
    Connection conn = metaStore.getConnection();
    Statement s = conn.createStatement();
    metaStore.dropAllTables();
    for (int i = 0; i < 10; i++) {
      metaStore.execute("DROP TABLE IF EXISTS tb_" + i + ";");
      metaStore.execute("CREATE TABLE tb_" + i + " (a INT(11));");
    }
    String dbUrl = conn.getMetaData().getURL();
    if (dbUrl.startsWith(MetaStoreUtils.SQLITE_URL_PREFIX)) {
      // sqlite: table names live in the sqlite_master catalog.
      ResultSet rs = s.executeQuery("select tbl_name from sqlite_master;");
      List<String> list = new ArrayList<>();
      while (rs.next()) {
        list.add(rs.getString(1));
      }
      metaStore.dropAllTables();
      rs = s.executeQuery("select tbl_name from sqlite_master;");
      List<String> list1 = new ArrayList<>();
      while (rs.next()) {
        list1.add(rs.getString(1));
      }
      // Exactly the 10 tables created above should have disappeared.
      Assert.assertEquals(10, list.size() - list1.size());
    } else {
      // Other databases: derive the schema name from the JDBC URL and query
      // INFORMATION_SCHEMA instead.
      String dbName;
      if (dbUrl.contains("?")) {
        dbName = dbUrl.substring(dbUrl.indexOf("/", 13) + 1, dbUrl.indexOf("?"));
      } else {
        dbName = dbUrl.substring(dbUrl.lastIndexOf("/") + 1, dbUrl.length());
      }
      ResultSet rs =
          s.executeQuery(
              "SELECT TABLE_NAME FROM "
                  + "INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '"
                  + dbName
                  + "';");
      List<String> list = new ArrayList<>();
      while (rs.next()) {
        list.add(rs.getString(1));
      }
      metaStore.dropAllTables();
      rs =
          s.executeQuery(
              "SELECT TABLE_NAME FROM "
                  + "INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '"
                  + dbName
                  + "';");
      List<String> list1 = new ArrayList<>();
      while (rs.next()) {
        list1.add(rs.getString(1));
      }
      Assert.assertEquals(10, list.size() - list1.size());
    }
    conn.close();
  }
  @Test
  public void testDBBlankStatements() throws Exception {
    // Seeds one rule row, then repeatedly replays a drop/create/select cycle
    // against the per-rule virtual access-count table to make sure repeated
    // execution of the same statements does not fail.
    String[] presqls =
        new String[] {
          "INSERT INTO rule (state, rule_text, submit_time, checked_count, "
              + "generated_cmdlets) VALUES (0, 'file: every 1s \n"
              + " | "
              + "accessCount(5s) > 3 | cache', 1494903787619, 0, 0);"
        };
    for (int i = 0; i < presqls.length; i++) {
      String sql = presqls[i];
      metaStore.execute(sql);
    }
    String[] sqls =
        new String[] {
          "DROP TABLE IF EXISTS VIR_ACC_CNT_TAB_1_accessCount_5000;",
          "CREATE TABLE VIR_ACC_CNT_TAB_1_accessCount_5000 "
              + "AS SELECT * FROM blank_access_count_info;",
          "SELECT fid from VIR_ACC_CNT_TAB_1_accessCount_5000;",
          "SELECT path FROM file WHERE (fid IN (SELECT fid FROM "
              + "VIR_ACC_CNT_TAB_1_accessCount_5000 WHERE ((count > 3))));"
        };
    // Cycle through the statement list three times.
    for (int i = 0; i < sqls.length * 3; i++) {
      int idx = i % sqls.length;
      String sql = sqls[idx];
      metaStore.execute(sql);
    }
  }
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-metastore/src/test/java/org/smartdata/metastore/TestDruid.java | smart-metastore/src/test/java/org/smartdata/metastore/TestDruid.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.smartdata.metastore;
import org.junit.Assert;
import org.junit.Test;
import org.smartdata.metastore.utils.MetaStoreUtils;
import org.smartdata.model.RuleInfo;
import org.smartdata.model.RuleState;
import java.io.InputStream;
import java.util.Properties;
public class TestDruid {
@Test
public void test() throws Exception {
InputStream in = getClass().getClassLoader()
.getResourceAsStream("druid-template.xml");
Properties p = new Properties();
p.loadFromXML(in);
String dbFile = TestDBUtil.getUniqueEmptySqliteDBFile();
String url = MetaStoreUtils.SQLITE_URL_PREFIX + dbFile;
p.setProperty("url", url);
DruidPool druidPool = new DruidPool(p);
MetaStore adapter = new MetaStore(druidPool);
String rule = "file : accessCount(10m) > 20 \n\n"
+ "and length() > 3 | cache";
long submitTime = System.currentTimeMillis();
RuleInfo info1 = new RuleInfo(0, submitTime,
rule, RuleState.ACTIVE, 0, 0, 0);
Assert.assertTrue(adapter.insertNewRule(info1));
RuleInfo info11 = adapter.getRuleInfo(info1.getId());
Assert.assertTrue(info1.equals(info11));
long now = System.currentTimeMillis();
adapter.updateRuleInfo(info1.getId(), RuleState.DELETED, now, 1, 1);
RuleInfo info12 = adapter.getRuleInfo(info1.getId());
Assert.assertTrue(info12.getLastCheckTime() == now);
druidPool.close();
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-metastore/src/test/java/org/smartdata/metastore/TestRulesTable.java | smart-metastore/src/test/java/org/smartdata/metastore/TestRulesTable.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.smartdata.metastore;
import org.junit.Assert;
import org.junit.Test;
import org.smartdata.model.RuleInfo;
import org.smartdata.model.RuleState;
import java.util.List;
/**
* Tests for table 'rules'.
*/
public class TestRulesTable extends TestDaoUtil {
/**
* Insert rules into table and retrieve them back.
* @throws Exception
*/
@Test
public void testRuleInsert() throws Exception {
initDao();
MetaStore adapter = new MetaStore(druidPool);
String rule = "file : accessCount(10m) > 20 \n\n"
+ "and length() > 3 | cache";
long submitTime = System.currentTimeMillis();
RuleInfo info1 = new RuleInfo(0, submitTime,
rule, RuleState.ACTIVE, 0, 0, 0);
Assert.assertTrue(adapter.insertNewRule(info1));
RuleInfo info11 = adapter.getRuleInfo(info1.getId());
Assert.assertTrue(info1.equals(info11));
RuleInfo info2 = new RuleInfo(0, submitTime,
rule, RuleState.ACTIVE, 0, 0, 0);
Assert.assertTrue(adapter.insertNewRule(info2));
RuleInfo info21 = adapter.getRuleInfo(info2.getId());
Assert.assertFalse(info11.equals(info21));
List<RuleInfo> infos = adapter.getRuleInfo();
assert(infos.size() == 2);
closeDao();
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-metastore/src/test/java/org/smartdata/metastore/TestDaoUtil.java | smart-metastore/src/test/java/org/smartdata/metastore/TestDaoUtil.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.smartdata.metastore;
import org.smartdata.metastore.utils.MetaStoreUtils;
import java.io.File;
import java.io.InputStream;
import java.util.Properties;
public class TestDaoUtil {
protected DruidPool druidPool;
protected String dbFile;
protected String url;
public void initDao() throws Exception {
InputStream in = getClass().getClassLoader()
.getResourceAsStream("druid-template.xml");
Properties p = new Properties();
p.loadFromXML(in);
dbFile = TestDBUtil.getUniqueEmptySqliteDBFile();
url = MetaStoreUtils.SQLITE_URL_PREFIX + dbFile;
p.setProperty("url", url);
druidPool = new DruidPool(p);
}
public void closeDao() throws Exception {
File db = new File(dbFile);
if (db.exists()) {
db.delete();
}
if (druidPool != null) {
druidPool.close();
}
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-metastore/src/test/java/org/smartdata/metastore/TestDBUtil.java | smart-metastore/src/test/java/org/smartdata/metastore/TestDBUtil.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.smartdata.metastore;
import org.smartdata.metastore.utils.MetaStoreUtils;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.sql.Connection;
import java.util.UUID;
/**
* Utilities for accessing the testing database.
*/
public class TestDBUtil {
/**
* Get a connect to the testing database. A new physical database
* file for each call.
*
* @return
*/
public static Connection getTestDBInstance()
throws MetaStoreException {
// TODO remove today
String srcdir = System.getProperty("srcdir",
System.getProperty("user.dir") + "/src/main/resources");
String srcPath = srcdir + "/data-schema.db";
String destPath = getUniqueDBFilePath();
copyFile(srcPath, destPath);
Connection conn = MetaStoreUtils.createSqliteConnection(destPath);
return conn;
}
public static String getTestDir() {
String testdir = System.getProperty("testdir",
System.getProperty("user.dir") + "/target/test-dir");
return testdir;
}
public static String getUniqueFilePath() {
return getTestDir() + "/" + UUID.randomUUID().toString() + System.currentTimeMillis();
}
public static String getUniqueDBFilePath() {
return getUniqueFilePath() + ".db";
}
public static Connection getUniqueEmptySqliteDBInstance()
throws MetaStoreException {
return MetaStoreUtils.createSqliteConnection(getUniqueEmptySqliteDBFile());
}
/**
* Get an initialized empty Sqlite database file path.
*
* @return
* @throws IOException
* @throws MetaStoreException
* @throws ClassNotFoundException
*/
public static String getUniqueEmptySqliteDBFile()
throws MetaStoreException {
String dbFile = getUniqueDBFilePath();
Connection conn = null;
try {
conn = MetaStoreUtils.createSqliteConnection(dbFile);
MetaStoreUtils.initializeDataBase(conn);
conn.close();
return dbFile;
} catch (Exception e) {
throw new MetaStoreException(e);
} finally {
if (conn != null) {
try {
conn.close();
} catch (Exception e) {
throw new MetaStoreException(e);
}
}
File file = new File(dbFile);
file.deleteOnExit();
}
}
public static boolean copyFile(String srcPath, String destPath) {
boolean flag = false;
File src = new File(srcPath);
if (!src.exists()) {
System.out.println("source file:" + srcPath + "not exist");
return false;
}
File dest = new File(destPath);
if (dest.exists()) {
dest.delete();
} else {
if (!dest.getParentFile().exists()) {
if (!dest.getParentFile().mkdirs()) {
return false;
}
}
}
BufferedInputStream in = null;
PrintStream out = null;
try {
in = new BufferedInputStream(new FileInputStream(src));
out = new PrintStream(
new BufferedOutputStream(
new FileOutputStream(dest)));
byte[] buffer = new byte[1024 * 100];
int len = -1;
while ((len = in.read(buffer)) != -1) {
out.write(buffer, 0, len);
}
dest.deleteOnExit();
return true;
} catch (Exception e) {
System.out.println("copying failed" + e.getMessage());
flag = true;
return false;
} finally {
try {
in.close();
out.close();
if (flag) {
dest.delete();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-metastore/src/test/java/org/smartdata/metastore/dao/TestStoragePolicyDao.java | smart-metastore/src/test/java/org/smartdata/metastore/dao/TestStoragePolicyDao.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.smartdata.metastore.dao;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.smartdata.metastore.TestDaoUtil;
import org.smartdata.model.StoragePolicy;
public class TestStoragePolicyDao extends TestDaoUtil {
private StoragePolicyDao storagePolicyDao;
@Before
public void initStoragePolicyDao() throws Exception {
initDao();
storagePolicyDao = new StoragePolicyDao(druidPool.getDataSource());
}
@After
public void closeStoragePolicyDao() throws Exception {
closeDao();
storagePolicyDao = null;
}
@Test
public void testInsertGetStorage_policyTable() throws Exception {
StoragePolicy storagePolicy = new StoragePolicy((byte) 1, "pName");
storagePolicyDao.insertStoragePolicyTable(storagePolicy);
Assert.assertTrue(storagePolicyDao.getStoragePolicyName(1).equals("pName"));
if (!storagePolicyDao.isExist("pName") || !storagePolicyDao.isExist(1)) {
throw new Exception("isExist() error");
}
if (storagePolicyDao.getStoragePolicyName(1) != "pName") {
throw new Exception("getStoragePolicyName() error");
}
if (storagePolicyDao.getStorageSid("pName") != 1) {
throw new Exception("getStorageSid() error");
}
storagePolicyDao.getStoragePolicyIdNameMap();
storagePolicyDao.deleteStoragePolicy("pName");
if (storagePolicyDao.isExist("pName")) {
throw new Exception("deleteStoragePolicy() error");
}
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-metastore/src/test/java/org/smartdata/metastore/dao/TestCompressionFileDao.java | smart-metastore/src/test/java/org/smartdata/metastore/dao/TestCompressionFileDao.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.smartdata.metastore.dao;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.smartdata.metastore.TestDaoUtil;
import org.smartdata.model.CompressionFileState;
import java.util.ArrayList;
import java.util.List;
public class TestCompressionFileDao extends TestDaoUtil {
private CompressionFileDao compressionFileDao;
private String compressionImpl = "Zlib";
private List<Long> originalPos = new ArrayList<>();
private List<Long> compressedPos = new ArrayList<>();
@Before
public void initCompressionFileDao() throws Exception {
initDao();
compressionFileDao = new CompressionFileDao(druidPool.getDataSource());
originalPos.add(9000L);
originalPos.add(8000L);
compressedPos.add(3000L);
compressedPos.add(2000L);
}
@After
public void closeCompressionFileDao() throws Exception {
closeDao();
compressionFileDao = null;
}
@Test
public void testInsertDeleteCompressionFiles() throws Exception {
CompressionFileState compressionInfo = new CompressionFileState(
"/test", 131072, compressionImpl, originalPos.toArray(new Long[0]),
compressedPos.toArray(new Long[0]));
//insert test
compressionFileDao.insert(compressionInfo);
Assert.assertTrue(compressionFileDao.getInfoByPath("/test").
getOriginalPos()[0].equals(9000L));
//delete test
compressionFileDao.deleteByPath("/test");
Assert.assertTrue(compressionFileDao.getAll().size() == 0);
}
@Test
public void testInsertUpdate() throws Exception {
CompressionFileState compressionInfo = new CompressionFileState(
"/test", 131072, compressionImpl, originalPos.toArray(new Long[0]),
compressedPos.toArray(new Long[0]));
//insert test
compressionFileDao.insertUpdate(compressionInfo);
Assert.assertTrue(compressionFileDao.getInfoByPath("/test").
getOriginalPos()[0].equals(9000L));
//update test
compressionInfo.setOriginalLength(1000);
compressionInfo.setCompressedLength(100);
compressionFileDao.insertUpdate(compressionInfo);
CompressionFileState newCompressionInfo = compressionFileDao.getInfoByPath("/test");
Assert.assertEquals(compressionInfo.getOriginalLength(),
newCompressionInfo.getOriginalLength());
Assert.assertEquals(compressionInfo.getCompressedLength(),
newCompressionInfo.getCompressedLength());
}
@Test
public void testGetCompressionInfo() throws Exception {
long originalLen = 100;
long compressedLen = 50;
CompressionFileState compressionInfo = new CompressionFileState(
"/test1", 131072, compressionImpl, originalLen, compressedLen,
originalPos.toArray(new Long[0]), compressedPos.toArray(new Long[0]));
CompressionFileState compressionInfo2 = new CompressionFileState(
"/test2", 131072, compressionImpl, originalPos.toArray(new Long[0]),
compressedPos.toArray(new Long[0]));
compressionFileDao.insert(compressionInfo);
compressionFileDao.insert(compressionInfo2);
CompressionFileState dbcompressionInfo = compressionFileDao.getInfoByPath("/test1");
Assert.assertTrue(dbcompressionInfo.getPath().equals("/test1"));
Assert.assertTrue(dbcompressionInfo.getBufferSize() == 131072);
Assert.assertTrue(dbcompressionInfo.getCompressionImpl().equals(compressionImpl));
Assert.assertTrue(dbcompressionInfo.getOriginalPos()[0].equals(9000L));
Assert.assertTrue(dbcompressionInfo.getCompressedPos()[1].equals(2000L));
Assert.assertTrue(compressionFileDao.getAll().size() == 2);
Assert.assertEquals(originalLen, dbcompressionInfo.getOriginalLength());
Assert.assertEquals(compressedLen, dbcompressionInfo.getCompressedLength());
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Intel-bigdata/SSM | https://github.com/Intel-bigdata/SSM/blob/e0c90f054687a18c4e095547ac5e31b8b313b3ef/smart-metastore/src/test/java/org/smartdata/metastore/dao/TestFileDiffDao.java | smart-metastore/src/test/java/org/smartdata/metastore/dao/TestFileDiffDao.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.smartdata.metastore.dao;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.smartdata.metastore.TestDaoUtil;
import org.smartdata.model.FileDiff;
import org.smartdata.model.FileDiffState;
import org.smartdata.model.FileDiffType;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
public class TestFileDiffDao extends TestDaoUtil {
private FileDiffDao fileDiffDao;
@Before
public void initFileDiffDAO() throws Exception {
initDao();
fileDiffDao = new FileDiffDao(druidPool.getDataSource());
}
@After
public void closeFileDiffDAO() throws Exception {
closeDao();
fileDiffDao = null;
}
@Test
public void testInsertAndGetSingleRecord() {
FileDiff fileDiff = new FileDiff();
fileDiff.setParameters(new HashMap<String, String>());
fileDiff.getParameters().put("-test", "test");
fileDiff.setSrc("test");
fileDiff.setState(FileDiffState.PENDING);
fileDiff.setDiffType(FileDiffType.APPEND);
fileDiff.setCreateTime(1);
fileDiffDao.insert(fileDiff);
Assert.assertTrue(fileDiffDao.getAll().get(0).equals(fileDiff));
}
@Test
public void testBatchUpdateAndQuery() {
FileDiff[] fileDiffs = new FileDiff[2];
fileDiffs[0] = new FileDiff();
fileDiffs[0].setDiffId(1);
fileDiffs[0].setParameters(new HashMap<String, String>());
fileDiffs[0].setSrc("test");
fileDiffs[0].setState(FileDiffState.RUNNING);
fileDiffs[0].setDiffType(FileDiffType.APPEND);
fileDiffs[0].setCreateTime(1);
fileDiffs[1] = new FileDiff();
fileDiffs[1].setDiffId(2);
fileDiffs[1].setParameters(new HashMap<String, String>());
fileDiffs[1].setSrc("src");
fileDiffs[1].setState(FileDiffState.PENDING);
fileDiffs[1].setDiffType(FileDiffType.APPEND);
fileDiffs[1].setCreateTime(1);
fileDiffDao.insert(fileDiffs);
List<FileDiff> fileInfoList = fileDiffDao.getAll();
for (int i = 0; i < 2; i++) {
Assert.assertTrue(fileInfoList.get(i).equals(fileDiffs[i]));
}
//update
List<Long> dids = new ArrayList<>();
dids.add(1L);
dids.add(2L);
List<String> parameters = new ArrayList<>();
parameters.add(fileDiffs[0].getParametersJsonString());
parameters.add(fileDiffs[1].getParametersJsonString());
List<FileDiffState> fileDiffStates = new ArrayList<>();
fileDiffStates.add(FileDiffState.APPLIED);
fileDiffStates.add(fileDiffs[1].getState());
fileDiffDao.batchUpdate(dids, fileDiffStates, parameters);
fileInfoList = fileDiffDao.getAll();
Assert.assertTrue(fileInfoList.get(0).getState().equals(FileDiffState.APPLIED));
fileDiffDao.batchUpdate(dids, FileDiffState.MERGED);
Assert.assertTrue(fileDiffDao.getAll().get(0).getState().equals(FileDiffState.MERGED));
}
@Test
public void testBatchInsertAndQuery() {
List<FileDiff> fileDiffs = new ArrayList<>();
FileDiff fileDiff = new FileDiff();
fileDiff.setParameters(new HashMap<String, String>());
fileDiff.setSrc("test");
fileDiff.setState(FileDiffState.RUNNING);
fileDiff.setDiffType(FileDiffType.APPEND);
fileDiff.setCreateTime(1);
fileDiffs.add(fileDiff);
fileDiff = new FileDiff();
fileDiff.setParameters(new HashMap<String, String>());
fileDiff.setSrc("src");
fileDiff.setState(FileDiffState.PENDING);
fileDiff.setDiffType(FileDiffType.APPEND);
fileDiff.setCreateTime(1);
fileDiffs.add(fileDiff);
fileDiffDao.insert(fileDiffs);
List<FileDiff> fileInfoList = fileDiffDao.getAll();
for (int i = 0; i < 2; i++) {
Assert.assertTrue(fileInfoList.get(i).equals(fileDiffs.get(i)));
}
List<String> paths = fileDiffDao.getSyncPath(0);
Assert.assertTrue(paths.size() == 1);
Assert.assertTrue(fileDiffDao.getPendingDiff("src").size() == 1);
Assert.assertTrue(fileDiffDao.getByState("test", FileDiffState.RUNNING).size() == 1);
}
@Test
public void testUpdate() {
FileDiff[] fileDiffs = new FileDiff[2];
fileDiffs[0] = new FileDiff();
fileDiffs[0].setDiffId(1);
fileDiffs[0].setRuleId(1);
fileDiffs[0].setParameters(new HashMap<String, String>());
fileDiffs[0].setSrc("test");
fileDiffs[0].setState(FileDiffState.PENDING);
fileDiffs[0].setDiffType(FileDiffType.APPEND);
fileDiffs[0].setCreateTime(1);
fileDiffs[1] = new FileDiff();
fileDiffs[1].setDiffId(2);
fileDiffs[0].setRuleId(1);
fileDiffs[1].setParameters(new HashMap<String, String>());
fileDiffs[1].setSrc("src");
fileDiffs[1].setState(FileDiffState.PENDING);
fileDiffs[1].setDiffType(FileDiffType.APPEND);
fileDiffs[1].setCreateTime(1);
fileDiffDao.insert(fileDiffs);
fileDiffDao.update(1, FileDiffState.RUNNING);
fileDiffs[0].setState(FileDiffState.RUNNING);
Assert.assertTrue(fileDiffDao.getById(1).equals(fileDiffs[0]));
Assert.assertTrue(fileDiffDao.getPendingDiff().size() == 1);
fileDiffs[0].getParameters().put("-offset", "0");
fileDiffs[0].setSrc("test1");
fileDiffs[1].setCreateTime(2);
fileDiffs[1].setRuleId(2);
fileDiffs[1].setDiffType(FileDiffType.RENAME);
fileDiffDao.update(fileDiffs);
Assert.assertTrue(fileDiffDao.getById(1).equals(fileDiffs[0]));
Assert.assertTrue(fileDiffDao.getById(2).equals(fileDiffs[1]));
}
@Test
public void testDeleteUselessRecords() {
FileDiff[] fileDiffs = new FileDiff[2];
fileDiffs[0] = new FileDiff();
fileDiffs[0].setDiffId(1);
fileDiffs[0].setRuleId(1);
fileDiffs[0].setParameters(new HashMap<String, String>());
fileDiffs[0].setSrc("test");
fileDiffs[0].setState(FileDiffState.PENDING);
fileDiffs[0].setDiffType(FileDiffType.APPEND);
fileDiffs[0].setCreateTime(1);
fileDiffs[1] = new FileDiff();
fileDiffs[1].setDiffId(2);
fileDiffs[0].setRuleId(1);
fileDiffs[1].setParameters(new HashMap<String, String>());
fileDiffs[1].setSrc("src");
fileDiffs[1].setState(FileDiffState.PENDING);
fileDiffs[1].setDiffType(FileDiffType.APPEND);
fileDiffs[1].setCreateTime(2);
fileDiffDao.insert(fileDiffs);
Assert.assertEquals(fileDiffDao.getUselessRecordsNum(), 0);
fileDiffDao.update(1, FileDiffState.APPLIED);
Assert.assertEquals(fileDiffDao.getUselessRecordsNum(), 1);
fileDiffDao.update(2, FileDiffState.FAILED);
Assert.assertEquals(fileDiffDao.getUselessRecordsNum(), 2);
fileDiffDao.update(2, FileDiffState.DELETED);
Assert.assertEquals(fileDiffDao.getUselessRecordsNum(), 2);
fileDiffDao.deleteUselessRecords(1);
Assert.assertEquals(fileDiffDao.getAll().size(), 1);
}
}
| java | Apache-2.0 | e0c90f054687a18c4e095547ac5e31b8b313b3ef | 2026-01-05T02:41:11.405497Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.