Dataset Preview
Duplicate
The full dataset viewer is not available (click to read why). Only showing a preview of the rows.
The dataset generation failed
Error code:   DatasetGenerationError
Exception:    ArrowInvalid
Message:      JSON parse error: Missing a closing quotation mark in string. in row 645
Traceback:    Traceback (most recent call last):
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/packaged_modules/json/json.py", line 145, in _generate_tables
                  dataset = json.load(f)
                File "/usr/local/lib/python3.9/json/__init__.py", line 293, in load
                  return loads(fp.read(),
                File "/usr/local/lib/python3.9/json/__init__.py", line 346, in loads
                  return _default_decoder.decode(s)
                File "/usr/local/lib/python3.9/json/decoder.py", line 340, in decode
                  raise JSONDecodeError("Extra data", s, end)
              json.decoder.JSONDecodeError: Extra data: line 2 column 1 (char 2213)
              
              During handling of the above exception, another exception occurred:
              
              Traceback (most recent call last):
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1995, in _prepare_split_single
                  for _, table in generator:
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/packaged_modules/json/json.py", line 148, in _generate_tables
                  raise e
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/packaged_modules/json/json.py", line 122, in _generate_tables
                  pa_table = paj.read_json(
                File "pyarrow/_json.pyx", line 308, in pyarrow._json.read_json
                File "pyarrow/error.pxi", line 154, in pyarrow.lib.pyarrow_internal_check_status
                File "pyarrow/error.pxi", line 91, in pyarrow.lib.check_status
              pyarrow.lib.ArrowInvalid: JSON parse error: Missing a closing quotation mark in string. in row 645
              
              The above exception was the direct cause of the following exception:
              
              Traceback (most recent call last):
                File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1529, in compute_config_parquet_and_info_response
                  parquet_operations = convert_to_parquet(builder)
                File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1154, in convert_to_parquet
                  builder.download_and_prepare(
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1027, in download_and_prepare
                  self._download_and_prepare(
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1122, in _download_and_prepare
                  self._prepare_split(split_generator, **prepare_split_kwargs)
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1882, in _prepare_split
                  for job_id, done, content in self._prepare_split_single(
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2038, in _prepare_split_single
                  raise DatasetGenerationError("An error occurred while generating the dataset") from e
              datasets.exceptions.DatasetGenerationError: An error occurred while generating the dataset

Need help to make the dataset viewer work? Make sure to review how to configure the dataset viewer, and open a discussion for direct support.

text
string
meta
dict
<?xml version="1.0" encoding="UTF-8"?>
<!-- Maven descriptor for the concurrency sandbox demo module. -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>com.banadiga.concurrent</groupId>
  <artifactId>sandbox-concurrent</artifactId>
  <version>1.0-SNAPSHOT</version>
  <packaging>jar</packaging>

  <name>sandbox | concurrent</name>
  <description>Demo project for concurrent</description>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
    <java.version>1.8</java.version>
    <maven.compiler.source>1.8</maven.compiler.source>
    <maven.compiler.target>1.8</maven.compiler.target>
  </properties>

  <build>
    <plugins>
      <!-- Compile at the Java 1.8 source/target levels declared above. -->
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-compiler-plugin</artifactId>
        <version>3.6.1</version>
      </plugin>
      <!-- Produce a runnable jar: manifest gets a Class-Path plus Main-Class. -->
      <!-- NOTE(review): "ConcurrecyApplication" looks misspelled, but it must
           match the actual class name in the sources; verify before changing. -->
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-jar-plugin</artifactId>
        <configuration>
          <archive>
            <manifest>
              <addClasspath>true</addClasspath>
              <mainClass>com.banadiga.concurrent.ConcurrecyApplication</mainClass>
            </manifest>
          </archive>
        </configuration>
      </plugin>
    </plugins>
  </build>
</project>
{ "content_hash": "f8f6570ed7ef5448c5ad738450f0ddc8", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 108, "avg_line_length": 34.34090909090909, "alnum_prop": 0.6565188616810059, "repo_name": "banadiga/sandbox", "id": "fed2f800a8c9d5938a2d5532999fcabc54752e72", "size": "1511", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "concurrent/pom.xml", "mode": "33188", "license": "mit", "language": [ { "name": "Groovy", "bytes": "665" }, { "name": "HTML", "bytes": "4387" }, { "name": "Java", "bytes": "108069" }, { "name": "JavaScript", "bytes": "344" }, { "name": "Shell", "bytes": "2731" } ] }
using System.Reflection;
using System.Runtime.InteropServices;
using System.Security;

// Assembly-level metadata for the Orchard framework library. Editing the
// attribute values below changes the information baked into the assembly.
[assembly: AssemblyTitle("Orchard.Framework")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyProduct("Orchard")]
[assembly: AssemblyCopyright("Copyright © Outercurve Foundation 2009")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Types stay hidden from COM unless an individual type opts in with
// [ComVisible(true)].
[assembly: ComVisible(false)]

// Typelib ID used if this project is ever exposed to COM.
[assembly: Guid("b31f0199-7ccd-40ca-8dc5-d5ad32aa3175")]

// Version parts are Major.Minor.Build.Revision; using '*' (e.g. "1.0.*")
// would let the compiler auto-generate Build and Revision.
[assembly: AssemblyVersion("1.4.0")]
[assembly: AssemblyFileVersion("1.4.0")]

// Marks all code in the assembly as security-transparent.
[assembly: SecurityTransparent]
{ "content_hash": "7a4399c5dd56bd7e9aa96253b95c5b08", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 84, "avg_line_length": 35.05, "alnum_prop": 0.7467902995720399, "repo_name": "kayone/Orchard", "id": "0ec3d5c34c7af053b9d51612aae55ae41e591f6a", "size": "1405", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Orchard/Properties/AssemblyInfo.cs", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ASP", "bytes": "2330" }, { "name": "C", "bytes": "4755" }, { "name": "C#", "bytes": "5178578" }, { "name": "JavaScript", "bytes": "1156043" } ] }
package com.tp.ems.common.mapper; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.TimeZone; import org.apache.commons.lang3.StringEscapeUtils; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonParser.Feature; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.JavaType; import com.fasterxml.jackson.databind.JsonSerializer; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import com.fasterxml.jackson.databind.SerializerProvider; import com.fasterxml.jackson.databind.module.SimpleModule; import com.fasterxml.jackson.databind.util.JSONPObject; import com.fasterxml.jackson.module.jaxb.JaxbAnnotationModule; import com.google.common.collect.Lists; import com.google.common.collect.Maps; /** * 简单封装Jackson,实现JSON String<->Java Object的Mapper. * 封装不同的输出风格, 使用不同的builder函数创建实例. 
* @author ThinkGem * @version 2013-11-15 */ public class JsonMapper extends ObjectMapper { private static final long serialVersionUID = 1L; private static Logger logger = LoggerFactory.getLogger(JsonMapper.class); private static JsonMapper mapper; public JsonMapper() { this(Include.NON_EMPTY); } public JsonMapper(Include include) { // 设置输出时包含属性的风格 if (include != null) { this.setSerializationInclusion(include); } // 允许单引号、允许不带引号的字段名称 this.enableSimple(); // 设置输入时忽略在JSON字符串中存在但Java对象实际没有的属性 this.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES); // 空值处理为空串 this.getSerializerProvider().setNullValueSerializer(new JsonSerializer<Object>(){ @Override public void serialize(Object value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException { jgen.writeString(""); } }); // 进行HTML解码。 this.registerModule(new SimpleModule().addSerializer(String.class, new JsonSerializer<String>(){ @Override public void serialize(String value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException { jgen.writeString(StringEscapeUtils.unescapeHtml4(value)); } })); // 设置时区 this.setTimeZone(TimeZone.getDefault());//getTimeZone("GMT+8:00") } /** * 创建只输出非Null且非Empty(如List.isEmpty)的属性到Json字符串的Mapper,建议在外部接口中使用. */ public static JsonMapper getInstance() { if (mapper == null){ mapper = new JsonMapper().enableSimple(); } return mapper; } /** * 创建只输出初始值被改变的属性到Json字符串的Mapper, 最节约的存储方式,建议在内部接口中使用。 */ public static JsonMapper nonDefaultMapper() { if (mapper == null){ mapper = new JsonMapper(Include.NON_DEFAULT); } return mapper; } /** * Object可以是POJO,也可以是Collection或数组。 * 如果对象为Null, 返回"null". * 如果集合为空集合, 返回"[]". */ public String toJson(Object object) { try { return this.writeValueAsString(object); } catch (IOException e) { logger.warn("write to json string error:" + object, e); return null; } } /** * 反序列化POJO或简单Collection如List<String>. * * 如果JSON字符串为Null或"null"字符串, 返回Null. * 如果JSON字符串为"[]", 返回空集合. 
* * 如需反序列化复杂Collection如List<MyBean>, 请使用fromJson(String,JavaType) * @see #fromJson(String, JavaType) */ public <T> T fromJson(String jsonString, Class<T> clazz) { if (StringUtils.isEmpty(jsonString)) { return null; } try { return this.readValue(jsonString, clazz); } catch (IOException e) { logger.warn("parse json string error:" + jsonString, e); return null; } } /** * 反序列化复杂Collection如List<Bean>, 先使用函數createCollectionType构造类型,然后调用本函数. * @see #createCollectionType(Class, Class...) */ @SuppressWarnings("unchecked") public <T> T fromJson(String jsonString, JavaType javaType) { if (StringUtils.isEmpty(jsonString)) { return null; } try { return (T) this.readValue(jsonString, javaType); } catch (IOException e) { logger.warn("parse json string error:" + jsonString, e); return null; } } /** * 構造泛型的Collection Type如: * ArrayList<MyBean>, 则调用constructCollectionType(ArrayList.class,MyBean.class) * HashMap<String,MyBean>, 则调用(HashMap.class,String.class, MyBean.class) */ public JavaType createCollectionType(Class<?> collectionClass, Class<?>... elementClasses) { return this.getTypeFactory().constructParametricType(collectionClass, elementClasses); } /** * 當JSON裡只含有Bean的部分屬性時,更新一個已存在Bean,只覆蓋該部分的屬性. */ @SuppressWarnings("unchecked") public <T> T update(String jsonString, T object) { try { return (T) this.readerForUpdating(object).readValue(jsonString); } catch (JsonProcessingException e) { logger.warn("update json string:" + jsonString + " to object:" + object + " error.", e); } catch (IOException e) { logger.warn("update json string:" + jsonString + " to object:" + object + " error.", e); } return null; } /** * 輸出JSONP格式數據. */ public String toJsonP(String functionName, Object object) { return toJson(new JSONPObject(functionName, object)); } /** * 設定是否使用Enum的toString函數來讀寫Enum, * 為False時時使用Enum的name()函數來讀寫Enum, 默認為False. * 注意本函數一定要在Mapper創建後, 所有的讀寫動作之前調用. 
*/ public JsonMapper enableEnumUseToString() { this.enable(SerializationFeature.WRITE_ENUMS_USING_TO_STRING); this.enable(DeserializationFeature.READ_ENUMS_USING_TO_STRING); return this; } /** * 支持使用Jaxb的Annotation,使得POJO上的annotation不用与Jackson耦合。 * 默认会先查找jaxb的annotation,如果找不到再找jackson的。 */ public JsonMapper enableJaxbAnnotation() { JaxbAnnotationModule module = new JaxbAnnotationModule(); this.registerModule(module); return this; } /** * 允许单引号 * 允许不带引号的字段名称 */ public JsonMapper enableSimple() { this.configure(Feature.ALLOW_SINGLE_QUOTES, true); this.configure(Feature.ALLOW_UNQUOTED_FIELD_NAMES, true); return this; } /** * 取出Mapper做进一步的设置或使用其他序列化API. */ public ObjectMapper getMapper() { return this; } /** * 对象转换为JSON字符串 * @param object * @return */ public static String toJsonString(Object object){ return JsonMapper.getInstance().toJson(object); } /** * JSON字符串转换为对象 * @param jsonString * @param clazz * @return */ public static Object fromJsonString(String jsonString, Class<?> clazz){ return JsonMapper.getInstance().fromJson(jsonString, clazz); } /** * 测试 */ public static void main(String[] args) { List<Map<String, Object>> list = Lists.newArrayList(); Map<String, Object> map = Maps.newHashMap(); map.put("id", 1); map.put("pId", -1); map.put("name", "根节点"); list.add(map); map = Maps.newHashMap(); map.put("id", 2); map.put("pId", 1); map.put("name", "你好"); map.put("open", true); list.add(map); String json = JsonMapper.getInstance().toJson(list); System.out.println(json); } }
{ "content_hash": "043b76e53134a91c6216c12cda12bd63", "timestamp": "", "source": "github", "line_count": 259, "max_line_length": 98, "avg_line_length": 26.91891891891892, "alnum_prop": 0.7216006884681584, "repo_name": "347184068/gmenergy", "id": "5552a575fd5b1ec4e4e0567363266ac80efb0531", "size": "7974", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/tp/ems/common/mapper/JsonMapper.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "3753" }, { "name": "Batchfile", "bytes": "2131" }, { "name": "CSS", "bytes": "1775341" }, { "name": "HTML", "bytes": "2196889" }, { "name": "Java", "bytes": "1413394" }, { "name": "JavaScript", "bytes": "6467323" }, { "name": "PHP", "bytes": "8060" } ] }
import {describe, it, expect, beforeEach, ddescribe, iit, xit, el} from 'angular2/test_lib';

import {StyleUrlResolver} from 'angular2/src/render/dom/compiler/style_url_resolver';
import {UrlResolver} from 'angular2/src/services/url_resolver';
import {AppRootUrl} from 'angular2/src/services/app_root_url';

// Unit tests for StyleUrlResolver: checks that relative URLs embedded in CSS
// ("url(...)" values and "@import" targets) are rewritten against a base URL.
// NOTE(review): the expected strings must match the resolver output
// byte-for-byte, including quoting style — resolved urls always come back
// single-quoted regardless of the input quoting.
export function main() {
  describe('StyleUrlResolver', () => {
    let styleUrlResolver;

    // Fresh resolver per test; empty app root so only the explicit base
    // URL passed to resolveUrls() affects resolution.
    beforeEach(
        () => { styleUrlResolver = new StyleUrlResolver(new UrlResolver(new AppRootUrl(""))); });

    // Covers all three url() quoting forms: double, single, none.
    it('should resolve "url()" urls', () => {
      var css = `
      .foo {
        background-image: url("double.jpg");
        background-image: url('simple.jpg');
        background-image: url(noquote.jpg);
      }`;
      var expectedCss = `
      .foo {
        background-image: url('http://ng.io/double.jpg');
        background-image: url('http://ng.io/simple.jpg');
        background-image: url('http://ng.io/noquote.jpg');
      }`;

      var resolvedCss = styleUrlResolver.resolveUrls(css, 'http://ng.io');
      expect(resolvedCss).toEqual(expectedCss);
    });

    // Bare "@import '...'" form (no url() wrapper).
    it('should resolve "@import" urls', () => {
      var css = `
      @import '1.css';
      @import "2.css";
      `;
      var expectedCss = `
      @import 'http://ng.io/1.css';
      @import 'http://ng.io/2.css';
      `;

      var resolvedCss = styleUrlResolver.resolveUrls(css, 'http://ng.io');
      expect(resolvedCss).toEqual(expectedCss);
    });

    // "@import url(...)" form, again in all three quoting styles.
    it('should resolve "@import url()" urls', () => {
      var css = `
      @import url('3.css');
      @import url("4.css");
      @import url(5.css);
      `;
      var expectedCss = `
      @import url('http://ng.io/3.css');
      @import url('http://ng.io/4.css');
      @import url('http://ng.io/5.css');
      `;

      var resolvedCss = styleUrlResolver.resolveUrls(css, 'http://ng.io');
      expect(resolvedCss).toEqual(expectedCss);
    });

    // A trailing media query (here "print") must survive URL rewriting.
    it('should support media query in "@import"', () => {
      var css = `
      @import 'print.css' print;
      @import url(print.css) print;
      `;
      var expectedCss = `
      @import 'http://ng.io/print.css' print;
      @import url('http://ng.io/print.css') print;
      `;

      var resolvedCss = styleUrlResolver.resolveUrls(css, 'http://ng.io');
      expect(resolvedCss).toEqual(expectedCss);
    });
  });
}
{ "content_hash": "27b48bfeab0b3188e81d04c2ea7d2918", "timestamp": "", "source": "github", "line_count": 76, "max_line_length": 97, "avg_line_length": 30.69736842105263, "alnum_prop": 0.5816545220745821, "repo_name": "gfogle/angular", "id": "d9d3707b3fb651461da3f3013ada3bff9a0fff74", "size": "2333", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "modules/angular2/test/render/dom/compiler/style_url_resolver_spec.ts", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "56049" }, { "name": "Dart", "bytes": "430550" }, { "name": "HTML", "bytes": "49687" }, { "name": "JavaScript", "bytes": "105881" }, { "name": "Shell", "bytes": "14435" }, { "name": "TypeScript", "bytes": "2334107" } ] }
package opennlp.tools.util; import java.io.ByteArrayInputStream; import java.util.HashMap; import java.util.Map; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import opennlp.tools.ml.EventTrainer; public class TrainingParametersTest { @Test void testConstructors() throws Exception { TrainingParameters tp1 = new TrainingParameters(build("key1=val1,key2=val2,key3=val3")); TrainingParameters tp2 = new TrainingParameters( new ByteArrayInputStream("key1=val1\nkey2=val2\nkey3=val3\n".getBytes()) ); TrainingParameters tp3 = new TrainingParameters(tp2); assertEquals(tp1, tp2); assertEquals(tp2, tp3); } @Test void testDefault() { TrainingParameters tr = TrainingParameters.defaultParams(); Assertions.assertEquals(4, tr.getObjectSettings().size()); Assertions.assertEquals("MAXENT", tr.algorithm()); Assertions.assertEquals(EventTrainer.EVENT_VALUE, tr.getStringParameter(TrainingParameters.TRAINER_TYPE_PARAM, "v11")); // use different defaults Assertions.assertEquals(100, tr.getIntParameter(TrainingParameters.ITERATIONS_PARAM, 200)); // use different defaults Assertions.assertEquals(5, tr.getIntParameter(TrainingParameters.CUTOFF_PARAM, 200)); // use different defaults } @Test public void testSetParamsWithCLIParams() { String[] args = { "-model" , "en-token-test.bin" , "-alphaNumOpt" , "isAlphaNumOpt" , "-lang" , "en" , "-data" , "en-token.train" , "-encoding" , "UTF-8" , "-cutoff" , "10" , "-iterations" , "50" }; TrainingParameters tr = TrainingParameters.setParams(args); Assertions.assertEquals("MAXENT" , tr.algorithm()); Assertions.assertEquals(50 , tr.getIntParameter(TrainingParameters.ITERATIONS_PARAM , TrainingParameters.ITERATIONS_DEFAULT_VALUE)); Assertions.assertEquals(10 , tr.getIntParameter(TrainingParameters.CUTOFF_PARAM , TrainingParameters.CUTOFF_DEFAULT_VALUE)); } @Test public void testSetParamsWithoutCLIParams() { String[] args = { "-model" , "en-token-test.bin" , "-alphaNumOpt" , "isAlphaNumOpt" , "-lang" , "en" , "-data" , 
"en-token.train" , "-encoding" , "UTF-8" }; TrainingParameters tr = TrainingParameters.setParams(args); Assertions.assertEquals("MAXENT" , tr.algorithm()); Assertions.assertEquals(100 , tr.getIntParameter(TrainingParameters.ITERATIONS_PARAM , TrainingParameters.ITERATIONS_DEFAULT_VALUE)); Assertions.assertEquals(5 , tr.getIntParameter(TrainingParameters.CUTOFF_PARAM , TrainingParameters.CUTOFF_DEFAULT_VALUE)); } @Test public void testSetParamsWithoutCutoffCLIParams() { String[] args = { "-model" , "en-token-test.bin" , "-alphaNumOpt" , "isAlphaNumOpt" , "-lang" , "en" , "-data" , "en-token.train" , "-encoding" , "UTF-8" , "-iterations" , "50" }; TrainingParameters tr = TrainingParameters.setParams(args); Assertions.assertEquals("MAXENT" , tr.algorithm()); Assertions.assertEquals(50 , tr.getIntParameter(TrainingParameters.ITERATIONS_PARAM , TrainingParameters.ITERATIONS_DEFAULT_VALUE)); Assertions.assertEquals(5 , tr.getIntParameter(TrainingParameters.CUTOFF_PARAM , TrainingParameters.CUTOFF_DEFAULT_VALUE)); } @Test public void testSetParamsWithoutIterationsCLIParams() { String[] args = { "-model" , "en-token-test.bin" , "-alphaNumOpt" , "isAlphaNumOpt" , "-lang" , "en" , "-data" , "en-token.train" , "-encoding" , "UTF-8" , "-cutoff" , "10" }; TrainingParameters tr = TrainingParameters.setParams(args); Assertions.assertEquals("MAXENT" , tr.algorithm()); Assertions.assertEquals(100 , tr.getIntParameter(TrainingParameters.ITERATIONS_PARAM , TrainingParameters.ITERATIONS_DEFAULT_VALUE)); Assertions.assertEquals(10 , tr.getIntParameter(TrainingParameters.CUTOFF_PARAM , TrainingParameters.CUTOFF_DEFAULT_VALUE)); } @Test void testGetAlgorithm() { TrainingParameters tp = build("Algorithm=Perceptron,n1.Algorithm=SVM"); Assertions.assertEquals("Perceptron", tp.algorithm()); Assertions.assertEquals("SVM", tp.algorithm("n1")); } @Test void testGetAlgorithmCaseInsensitive() { TrainingParameters tp = build("ALGORITHM=Perceptron,n1.Algorithm=SVM"); 
Assertions.assertEquals("Perceptron", tp.algorithm()); Assertions.assertEquals("SVM", tp.algorithm("n1")); } @Test void testGetSettings() { TrainingParameters tp = build("k1=v1,n1.k2=v2,n2.k3=v3,n1.k4=v4"); assertEquals(buildMap("k1=v1"), tp.getObjectSettings()); assertEquals(buildMap("k2=v2,k4=v4"), tp.getObjectSettings("n1")); assertEquals(buildMap("k3=v3"), tp.getObjectSettings("n2")); Assertions.assertTrue(tp.getObjectSettings("n3").isEmpty()); } @Test void testGetParameters() { TrainingParameters tp = build("k1=v1,n1.k2=v2,n2.k3=v3,n1.k4=v4"); assertEquals(build("k1=v1"), tp.getParameters(null)); assertEquals(build("k2=v2,k4=v4"), tp.getParameters("n1")); assertEquals(build("k3=v3"), tp.getParameters("n2")); Assertions.assertTrue(tp.getParameters("n3").getObjectSettings().isEmpty()); } @Test void testPutGet() { TrainingParameters tp = build("k1=v1,int.k2=123,str.k2=v3,str.k3=v4,boolean.k4=false,double.k5=123.45,k21=234.5"); Assertions.assertEquals("v1", tp.getStringParameter("k1", "def")); Assertions.assertEquals("def", tp.getStringParameter("k2", "def")); Assertions.assertEquals("v3", tp.getStringParameter("str", "k2", "def")); Assertions.assertEquals("def", tp.getStringParameter("str", "k4", "def")); Assertions.assertEquals(-100, tp.getIntParameter("k11", -100)); tp.put("k11", 234); Assertions.assertEquals(234, tp.getIntParameter("k11", -100)); Assertions.assertEquals(123, tp.getIntParameter("int", "k2", -100)); Assertions.assertEquals(-100, tp.getIntParameter("int", "k4", -100)); Assertions.assertEquals(tp.getDoubleParameter("k21", -100), 0.001, 234.5); tp.put("k21", 345.6); Assertions.assertEquals(tp.getDoubleParameter("k21", -100), 0.001, 345.6); // should be changed tp.putIfAbsent("k21", 456.7); Assertions.assertEquals(tp.getDoubleParameter("k21", -100), 0.001, 345.6); // should be unchanged Assertions.assertEquals(tp.getDoubleParameter("double", "k5", -100), 0.001, 123.45); Assertions.assertTrue(tp.getBooleanParameter("k31", true)); tp.put("k31", 
false); Assertions.assertFalse(tp.getBooleanParameter("k31", true)); Assertions.assertFalse(tp.getBooleanParameter("boolean", "k4", true)); } // format: k1=v1,k2=v2,... private static Map<String, Object> buildMap(String str) { String[] pairs = str.split(","); Map<String, Object> map = new HashMap<>(pairs.length); for (String pair : pairs) { String[] keyValue = pair.split("="); map.put(keyValue[0], keyValue[1]); } return map; } // format: k1=v1,k2=v2,... private static TrainingParameters build(String str) { return new TrainingParameters(buildMap(str)); } private static void assertEquals(Map<String, Object> map1, Map<String, Object> map2) { Assertions.assertNotNull(map1); Assertions.assertNotNull(map2); Assertions.assertEquals(map1.size(), map2.size()); for (String key : map1.keySet()) { Assertions.assertEquals(map1.get(key), map2.get(key)); } } private static void assertEquals(Map<String, Object> map, TrainingParameters actual) { Assertions.assertNotNull(actual); assertEquals(map, actual.getObjectSettings()); } private static void assertEquals(TrainingParameters expected, TrainingParameters actual) { if (expected == null) { Assertions.assertNull(actual); } else { assertEquals(expected.getObjectSettings(), actual); } } }
{ "content_hash": "6b370c5b45c9e49f77d16d901fc28a6d", "timestamp": "", "source": "github", "line_count": 215, "max_line_length": 104, "avg_line_length": 37.27441860465116, "alnum_prop": 0.6771899176441227, "repo_name": "apache/opennlp", "id": "dd05af6e7d1b4effe3eb06eb2bdc05f9433f586e", "size": "8814", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "opennlp-tools/src/test/java/opennlp/tools/util/TrainingParametersTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "7363" }, { "name": "CSS", "bytes": "1740" }, { "name": "Dockerfile", "bytes": "182" }, { "name": "HTML", "bytes": "9998" }, { "name": "Java", "bytes": "4689483" }, { "name": "Shell", "bytes": "5699" }, { "name": "XSLT", "bytes": "1286" } ] }
require 'sinatra'
require 'sinatra/json'
require 'mongo'

module Nightwatch
  # HTTP front-end for browsing the captured events stored in MongoDB.
  class Server < Sinatra::Application
    set :run, false
    set :server, 'thin'

    # TODO: Allow server configuration via command-line or settings file.
    client = Mongo::MongoClient.new
    collection = client['nightwatch']['events']

    # Landing page.
    get '/' do
      erb :index
    end

    # Every stored event, in full.
    get '/events' do
      json collection.find.to_a
    end

    # Trimmed listing. Declared before the ':id' route so that "summary"
    # is not captured as an event id.
    get '/events/summary' do
      summary_fields = [
        'exception.class',
        'exception.message',
        'host.name',
        'timestamp'
      ]
      json collection.find({}, fields: summary_fields).to_a
    end

    # A single event, looked up by its BSON object id.
    get '/events/:id' do
      json collection.find(_id: BSON::ObjectId(params[:id])).first
    end
  end
end
{ "content_hash": "170d03483e3a0efb0eba63cfba829f33", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 73, "avg_line_length": 20.085714285714285, "alnum_prop": 0.5988620199146515, "repo_name": "schmich/nightwatch", "id": "c590fde0cfe3109655ec48d02b19d6999e6b48f8", "size": "703", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/nightwatch/web/server.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "2881" }, { "name": "JavaScript", "bytes": "2169" }, { "name": "Ruby", "bytes": "18319" } ] }
package org.elasticsearch.ingest;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.collect.Tuple;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.function.BiConsumer;
import java.util.function.LongSupplier;
import java.util.stream.Collectors;

/**
 * A Processor that executes a list of other "processors". It executes a separate list of
 * "onFailureProcessors" when any of the processors throw an {@link Exception}.
 */
public class CompoundProcessor implements Processor {
    // Ingest-metadata keys written while on-failure processors run, and removed again afterwards.
    public static final String ON_FAILURE_MESSAGE_FIELD = "on_failure_message";
    public static final String ON_FAILURE_PROCESSOR_TYPE_FIELD = "on_failure_processor_type";
    public static final String ON_FAILURE_PROCESSOR_TAG_FIELD = "on_failure_processor_tag";
    public static final String ON_FAILURE_PIPELINE_FIELD = "on_failure_pipeline";

    // When true, a failing processor is simply skipped and the chain continues.
    private final boolean ignoreFailure;
    private final List<Processor> processors;
    private final List<Processor> onFailureProcessors;
    // Each main-chain processor paired with the metric object that records its timings/failures.
    private final List<Tuple<Processor, IngestMetric>> processorsWithMetrics;
    // Injectable clock (nanoseconds) so tests can control elapsed-time measurement.
    private final LongSupplier relativeTimeProvider;

    // Package-private: test constructor with an explicit clock and no on-failure processors.
    CompoundProcessor(LongSupplier relativeTimeProvider, Processor... processor) {
        this(false, Arrays.asList(processor), Collections.emptyList(), relativeTimeProvider);
    }

    /** Convenience constructor: fail-fast chain with no on-failure processors. */
    public CompoundProcessor(Processor... processor) {
        this(false, Arrays.asList(processor), Collections.emptyList());
    }

    public CompoundProcessor(boolean ignoreFailure, List<Processor> processors, List<Processor> onFailureProcessors) {
        this(ignoreFailure, processors, onFailureProcessors, System::nanoTime);
    }

    CompoundProcessor(boolean ignoreFailure, List<Processor> processors, List<Processor> onFailureProcessors, LongSupplier relativeTimeProvider) {
        super();
        this.ignoreFailure = ignoreFailure;
        this.processors = processors;
        this.onFailureProcessors = onFailureProcessors;
        this.relativeTimeProvider = relativeTimeProvider;
        this.processorsWithMetrics = new ArrayList<>(processors.size());
        // One fresh IngestMetric per processor; indices parallel the processors list.
        processors.forEach(p -> processorsWithMetrics.add(new Tuple<>(p, new IngestMetric())));
    }

    List<Tuple<Processor, IngestMetric>> getProcessorsWithMetrics() {
        return processorsWithMetrics;
    }

    public boolean isIgnoreFailure() {
        return ignoreFailure;
    }

    public List<Processor> getOnFailureProcessors() {
        return onFailureProcessors;
    }

    public List<Processor> getProcessors() {
        return processors;
    }

    /**
     * Recursively flattens nested CompoundProcessors (both main and on-failure chains)
     * into a single list of leaf processors.
     */
    public List<Processor> flattenProcessors() {
        List<Processor> allProcessors = new ArrayList<>(flattenProcessors(processors));
        allProcessors.addAll(flattenProcessors(onFailureProcessors));
        return allProcessors;
    }

    private static List<Processor> flattenProcessors(List<Processor> processors) {
        List<Processor> flattened = new ArrayList<>();
        for (Processor processor : processors) {
            if (processor instanceof CompoundProcessor) {
                // Recurse into nested compound processors.
                flattened.addAll(((CompoundProcessor) processor).flattenProcessors());
            } else {
                flattened.add(processor);
            }
        }
        return flattened;
    }

    @Override
    public String getType() {
        return "compound";
    }

    @Override
    public String getTag() {
        // Synthetic tag built from all leaf processor tags, joined with '-'.
        return "CompoundProcessor-" + flattenProcessors().stream().map(Processor::getTag).collect(Collectors.joining("-"));
    }

    /** Synchronous execution is unsupported; callers must use the async overload below. */
    @Override
    public IngestDocument execute(IngestDocument ingestDocument) throws Exception {
        throw new UnsupportedOperationException("this method should not get executed");
    }

    @Override
    public void execute(IngestDocument ingestDocument, BiConsumer<IngestDocument, Exception> handler) {
        innerExecute(0, ingestDocument, handler);
    }

    /**
     * Runs the processor at {@code currentProcessor} and, from its completion callback,
     * recurses to the next one — an asynchronous chain rather than a loop.
     * On failure: skip (ignoreFailure), report the wrapped exception, or divert to the
     * on-failure chain. A {@code null} result from a processor drops the document.
     */
    void innerExecute(int currentProcessor, IngestDocument ingestDocument, BiConsumer<IngestDocument, Exception> handler) {
        if (currentProcessor == processorsWithMetrics.size()) {
            // End of chain: the document survived all processors.
            handler.accept(ingestDocument, null);
            return;
        }

        Tuple<Processor, IngestMetric> processorWithMetric = processorsWithMetrics.get(currentProcessor);
        final Processor processor = processorWithMetric.v1();
        final IngestMetric metric = processorWithMetric.v2();
        final long startTimeInNanos = relativeTimeProvider.getAsLong();
        metric.preIngest();
        processor.execute(ingestDocument, (result, e) -> {
            // Record elapsed time whether the processor succeeded or failed.
            long ingestTimeInMillis = TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTimeInNanos);
            metric.postIngest(ingestTimeInMillis);

            if (e != null) {
                metric.ingestFailed();
                if (ignoreFailure) {
                    // Swallow the failure and continue with the unmodified document.
                    innerExecute(currentProcessor + 1, ingestDocument, handler);
                } else {
                    IngestProcessorException compoundProcessorException = newCompoundProcessorException(e, processor, ingestDocument);
                    if (onFailureProcessors.isEmpty()) {
                        handler.accept(null, compoundProcessorException);
                    } else {
                        executeOnFailureAsync(0, ingestDocument, compoundProcessorException, handler);
                    }
                }
            } else {
                if (result != null) {
                    innerExecute(currentProcessor + 1, result, handler);
                } else {
                    // null result means the processor intentionally dropped the document.
                    handler.accept(null, null);
                }
            }
        });
    }

    /**
     * Runs the on-failure chain the same recursive-async way. Failure metadata is put into
     * the document before the first on-failure processor and removed again on every exit
     * path, so downstream consumers never observe it.
     */
    void executeOnFailureAsync(int currentOnFailureProcessor, IngestDocument ingestDocument, ElasticsearchException exception, BiConsumer<IngestDocument, Exception> handler) {
        if (currentOnFailureProcessor == 0) {
            // Entering the on-failure chain: expose the failure details to its processors.
            putFailureMetadata(ingestDocument, exception);
        }

        if (currentOnFailureProcessor == onFailureProcessors.size()) {
            removeFailureMetadata(ingestDocument);
            handler.accept(ingestDocument, null);
            return;
        }

        final Processor onFailureProcessor = onFailureProcessors.get(currentOnFailureProcessor);
        onFailureProcessor.execute(ingestDocument, (result, e) -> {
            if (e != null) {
                // An on-failure processor itself failed: clean up and surface the new failure.
                removeFailureMetadata(ingestDocument);
                handler.accept(null, newCompoundProcessorException(e, onFailureProcessor, ingestDocument));
                return;
            }
            if (result == null) {
                // Document dropped by an on-failure processor.
                removeFailureMetadata(ingestDocument);
                handler.accept(null, null);
                return;
            }
            executeOnFailureAsync(currentOnFailureProcessor + 1, ingestDocument, exception, handler);
        });
    }

    // Copies the failed processor's type/tag/pipeline (stored as exception headers by
    // newCompoundProcessorException) into the document's ingest metadata.
    private void putFailureMetadata(IngestDocument ingestDocument, ElasticsearchException cause) {
        List<String> processorTypeHeader = cause.getHeader("processor_type");
        List<String> processorTagHeader = cause.getHeader("processor_tag");
        List<String> processorOriginHeader = cause.getHeader("pipeline_origin");
        String failedProcessorType = (processorTypeHeader != null) ? processorTypeHeader.get(0) : null;
        String failedProcessorTag = (processorTagHeader != null) ? processorTagHeader.get(0) : null;
        String failedPipelineId = (processorOriginHeader != null) ? processorOriginHeader.get(0) : null;
        Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
        ingestMetadata.put(ON_FAILURE_MESSAGE_FIELD, cause.getRootCause().getMessage());
        ingestMetadata.put(ON_FAILURE_PROCESSOR_TYPE_FIELD, failedProcessorType);
        ingestMetadata.put(ON_FAILURE_PROCESSOR_TAG_FIELD, failedProcessorTag);
        if (failedPipelineId != null) {
            ingestMetadata.put(ON_FAILURE_PIPELINE_FIELD, failedPipelineId);
        }
    }

    // Reverses putFailureMetadata(); called on every exit path of the on-failure chain.
    private void removeFailureMetadata(IngestDocument ingestDocument) {
        Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
        ingestMetadata.remove(ON_FAILURE_MESSAGE_FIELD);
        ingestMetadata.remove(ON_FAILURE_PROCESSOR_TYPE_FIELD);
        ingestMetadata.remove(ON_FAILURE_PROCESSOR_TAG_FIELD);
        ingestMetadata.remove(ON_FAILURE_PIPELINE_FIELD);
    }

    /**
     * Wraps {@code e} in an IngestProcessorException annotated with the failing processor's
     * type/tag and (when nested more than one pipeline deep) the pipeline origin stack.
     * Already-annotated exceptions are returned as-is to avoid double wrapping.
     */
    static IngestProcessorException newCompoundProcessorException(Exception e, Processor processor, IngestDocument document) {
        if (e instanceof IngestProcessorException && ((IngestProcessorException) e).getHeader("processor_type") != null) {
            return (IngestProcessorException) e;
        }

        IngestProcessorException exception = new IngestProcessorException(e);

        String processorType = processor.getType();
        if (processorType != null) {
            exception.addHeader("processor_type", processorType);
        }
        String processorTag = processor.getTag();
        if (processorTag != null) {
            exception.addHeader("processor_tag", processorTag);
        }
        List<String> pipelineStack = document.getPipelineStack();
        // Only record origin when nested inside more than one pipeline.
        if (pipelineStack.size() > 1) {
            exception.addHeader("pipeline_origin", pipelineStack);
        }
        return exception;
    }
}
{ "content_hash": "eee7a6c9e6a65ceee8cda1f96ffe4a9e", "timestamp": "", "source": "github", "line_count": 223, "max_line_length": 126, "avg_line_length": 42.39013452914798, "alnum_prop": 0.6718502062837195, "repo_name": "HonzaKral/elasticsearch", "id": "9cc414c5a15d6fda09971f82dd267d60c9ecfa83", "size": "10241", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "21661311" }, { "name": "Python", "bytes": "23898" }, { "name": "Ruby", "bytes": "17975" }, { "name": "Shell", "bytes": "27701" } ] }
<?php

// Albanian (sq) locale strings for Laravel's paginator.
return [

    /*
    |--------------------------------------------------------------------------
    | Pagination Language Lines
    |--------------------------------------------------------------------------
    |
    | The following language lines are used by the paginator library to build
    | the simple pagination links. You are free to change them to anything
    | you want to customize your views to better match your application.
    |
    */

    'previous' => '&laquo; Prapa',
    'next'     => 'Para &raquo;',

];
{ "content_hash": "199fe5d48bd8c7dbacc3014001b3035a", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 79, "avg_line_length": 28.157894736842106, "alnum_prop": 0.4373831775700935, "repo_name": "ifthenelse42/archontos", "id": "0f13123d4a2289c9a98f62ae2ff1e226bc589d8c", "size": "535", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "vendor/caouecs/laravel-lang/src/sq/pagination.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "552" }, { "name": "HTML", "bytes": "113550" }, { "name": "JavaScript", "bytes": "21043" }, { "name": "PHP", "bytes": "295926" } ] }
<?php
/*
Safe sample
input : use fopen to read /tmp/tainted.txt and put the first line in $tainted
sanitize : use of floatval
construction : interpretation
*/
/*Copyright 2015 Bertrand STIVALET

Permission is hereby granted, without written agreement or royalty fee, to
use, copy, modify, and distribute this software and its documentation for
any purpose, provided that the above copyright notice and the following
three paragraphs appear in all copies of this software.

IN NO EVENT SHALL AUTHORS BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF THIS
SOFTWARE AND ITS DOCUMENTATION, EVEN IF AUTHORS HAVE BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.

AUTHORS SPECIFICALLY DISCLAIM ANY WARRANTIES INCLUDING, BUT NOT LIMITED TO
THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE,
AND NON-INFRINGEMENT. THE SOFTWARE IS PROVIDED ON AN "AS-IS" BASIS AND
AUTHORS HAVE NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES,
ENHANCEMENTS, OR MODIFICATIONS.*/

// NOTE(review): this is a generated test-suite sample; its structure is intentional
// and should not be "fixed" — the point is that floatval() sanitizes the input.

// Read the first line of the tainted input file; fall back to "" on any failure.
$handle = @fopen("/tmp/tainted.txt", "r");
if ($handle) {
    if(($tainted = fgets($handle, 4096)) == false) {
        $tainted = "";
    }
    fclose($handle);
} else {
    $tainted = "";
}

// Sanitization: floatval() reduces the input to a number, so no XPath
// metacharacters can survive into the query below.
$tainted = floatval($tainted);

$query = "//User[@id= $tainted ]";

$xml = simplexml_load_file("users.xml");//file load

echo "query : ". $query ."<br /><br />" ;
$res=$xml->xpath($query);//execution
print_r($res);
echo "<br />" ;

?>
{ "content_hash": "c2475e4a8b56aa042574d18a0b3accc7", "timestamp": "", "source": "github", "line_count": 67, "max_line_length": 77, "avg_line_length": 22.17910447761194, "alnum_prop": 0.721399730820996, "repo_name": "stivalet/PHP-Vulnerability-test-suite", "id": "b9e1fb03b469f3d21d1d6d646f316d8021058063", "size": "1486", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Injection/CWE_91/safe/CWE_91__fopen__func_floatval__ID_test-interpretation.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "64184004" } ] }
namespace BizHawk.Client.EmuHawk
{
	// Designer-generated half of the MobileDetailView partial class.
	// Layout: a bitmap preview (bmpView1) stacked above a two-column
	// details ListView inside a TableLayoutPanel.
	partial class MobileDetailView
	{
		/// <summary>
		/// Required designer variable.
		/// </summary>
		private System.ComponentModel.IContainer components = null;

		/// <summary>
		/// Clean up any resources being used.
		/// </summary>
		/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
		protected override void Dispose(bool disposing)
		{
			if (disposing && (components != null))
			{
				components.Dispose();
			}
			base.Dispose(disposing);
		}

		#region Windows Form Designer generated code

		/// <summary>
		/// Required method for Designer support - do not modify
		/// the contents of this method with the code editor.
		/// </summary>
		private void InitializeComponent()
		{
			this.tableLayoutPanel1 = new System.Windows.Forms.TableLayoutPanel();
			this.bmpView1 = new BizHawk.Client.EmuHawk.BmpView();
			this.listView1 = new System.Windows.Forms.ListView();
			this.columnHeader1 = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader()));
			this.columnHeader2 = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader()));
			this.tableLayoutPanel1.SuspendLayout();
			this.SuspendLayout();
			// 
			// tableLayoutPanel1
			// 
			this.tableLayoutPanel1.ColumnCount = 1;
			this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 100F));
			this.tableLayoutPanel1.Controls.Add(this.bmpView1, 0, 0);
			this.tableLayoutPanel1.Controls.Add(this.listView1, 0, 1);
			this.tableLayoutPanel1.Dock = System.Windows.Forms.DockStyle.Fill;
			this.tableLayoutPanel1.Location = new System.Drawing.Point(0, 0);
			this.tableLayoutPanel1.Name = "tableLayoutPanel1";
			this.tableLayoutPanel1.RowCount = 2;
			this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 140F));
			this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 100F));
			this.tableLayoutPanel1.Size = new System.Drawing.Size(142, 332);
			this.tableLayoutPanel1.TabIndex = 0;
			// 
			// bmpView1
			// 
			this.bmpView1.Dock = System.Windows.Forms.DockStyle.Fill;
			this.bmpView1.Location = new System.Drawing.Point(3, 3);
			this.bmpView1.Name = "bmpView1";
			this.bmpView1.Size = new System.Drawing.Size(136, 134);
			this.bmpView1.TabIndex = 0;
			this.bmpView1.Text = "bmpView1";
			// 
			// listView1
			// 
			this.listView1.BorderStyle = System.Windows.Forms.BorderStyle.None;
			this.listView1.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] {
            this.columnHeader1,
            this.columnHeader2});
			this.listView1.Dock = System.Windows.Forms.DockStyle.Fill;
			this.listView1.GridLines = true;
			this.listView1.HeaderStyle = System.Windows.Forms.ColumnHeaderStyle.None;
			this.listView1.Location = new System.Drawing.Point(3, 143);
			this.listView1.Name = "listView1";
			this.listView1.Size = new System.Drawing.Size(136, 186);
			this.listView1.TabIndex = 1;
			this.listView1.UseCompatibleStateImageBehavior = false;
			this.listView1.View = System.Windows.Forms.View.Details;
			this.listView1.SizeChanged += new System.EventHandler(this.listView1_SizeChanged);
			// 
			// MobileDetailView
			// 
			this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
			this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
			this.ClientSize = new System.Drawing.Size(142, 332);
			this.Controls.Add(this.tableLayoutPanel1);
			this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.SizableToolWindow;
			this.Name = "MobileDetailView";
			this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
			this.Text = "MobileDetailView";
			this.SizeChanged += new System.EventHandler(this.MobileDetailView_SizeChanged);
			this.tableLayoutPanel1.ResumeLayout(false);
			this.ResumeLayout(false);

		}

		#endregion

		// Control fields wired up by InitializeComponent above.
		private System.Windows.Forms.TableLayoutPanel tableLayoutPanel1;
		private BmpView bmpView1;
		private System.Windows.Forms.ListView listView1;
		private System.Windows.Forms.ColumnHeader columnHeader1;
		private System.Windows.Forms.ColumnHeader columnHeader2;
	}
}
{ "content_hash": "41a695e34a7ebf3e9e309e8e25402789", "timestamp": "", "source": "github", "line_count": 104, "max_line_length": 135, "avg_line_length": 45.875, "alnum_prop": 0.65625654998952, "repo_name": "ircluzar/RTC3", "id": "4338429bb3f23103380eda186d2a5de5f4bcd69c", "size": "4773", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Real-Time Corruptor/BizHawk_RTC/BizHawk.Client.EmuHawk/tools/GBA/MobileDetailView.Designer.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "77250" }, { "name": "Batchfile", "bytes": "11077" }, { "name": "C", "bytes": "10464062" }, { "name": "C#", "bytes": "14062504" }, { "name": "C++", "bytes": "15252473" }, { "name": "CSS", "bytes": "74" }, { "name": "GLSL", "bytes": "6610" }, { "name": "HTML", "bytes": "426873" }, { "name": "Limbo", "bytes": "15313" }, { "name": "Lua", "bytes": "312676" }, { "name": "Makefile", "bytes": "127427" }, { "name": "Objective-C", "bytes": "40789" }, { "name": "PHP", "bytes": "863004" }, { "name": "POV-Ray SDL", "bytes": "206" }, { "name": "Python", "bytes": "27842" }, { "name": "Shell", "bytes": "19693" }, { "name": "Smalltalk", "bytes": "1719" } ] }
<?php
/**
 Template page for the NextGEN Gallery image browser (caption variant).

 The following variables are available to this template:

	$image : contains everything about the current image
	$meta  : contains the raw meta data from the image
	$exif  : contains the cleaned-up EXIF data
	$iptc  : contains the cleaned-up IPTC data
	$xmp   : contains the cleaned-up XMP data

 You can inspect any of them by inserting the tag <?php var_dump($variable) ?>

 To show the timestamp of the image, use <?php echo $exif['created_timestamp'] ?>
**/
?>
<?php if (!defined ('ABSPATH')) die ('No direct access allowed'); ?><?php if (!empty ($image)) : ?>
<div class="ngg-imagebrowser" id="<?php echo $image->anchor ?>" data-nextgen-gallery-id="<?php echo $displayed_gallery->id(); ?>">
	<h3><?php echo $image->alttext ?></h3>
	<div class="pic"><?php echo $image->href_link ?></div>
	<div class="ngg-imagebrowser-nav">
		<div class="back">
			<a class="ngg-browser-prev" id="ngg-prev-<?php echo $image->previous_pid ?>" href="<?php echo nextgen_esc_url($image->previous_image_link) ?>">&#9668; <?php _e('Back', 'nggallery') ?></a>
		</div>
		<div class="next">
			<a class="ngg-browser-next" id="ngg-next-<?php echo $image->next_pid ?>" href="<?php echo nextgen_esc_url($image->next_image_link) ?>"><?php _e('Next', 'nggallery') ?> &#9658;</a>
		</div>
		<div class="counter"><?php _e('Picture', 'nggallery') ?> <?php echo $image->number ?> <?php _e('of', 'nggallery')?> <?php echo $image->total ?></div>
		<div class="ngg-imagebrowser-desc"><p><?php echo $image->description ?></p></div>
	</div>
</div>
<?php endif; ?>
{ "content_hash": "275e539f56fe58a68baacc2566ec9245", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 190, "avg_line_length": 42.270270270270274, "alnum_prop": 0.6342710997442456, "repo_name": "pablopalillo/rojosporlasexta", "id": "803e27c401d03dccf2b69b849407010b3e1553be", "size": "1564", "binary": false, "copies": "71", "ref": "refs/heads/master", "path": "wp-content/plugins/nextgen-gallery/products/photocrati_nextgen/modules/ngglegacy/view/imagebrowser-caption.php", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "ApacheConf", "bytes": "643" }, { "name": "CSS", "bytes": "3307078" }, { "name": "HTML", "bytes": "27177" }, { "name": "JavaScript", "bytes": "3932744" }, { "name": "Modelica", "bytes": "10338" }, { "name": "PHP", "bytes": "17798085" }, { "name": "Perl", "bytes": "2554" } ] }
/** * A mixin for groups of Focusable things (Components, Widgets, etc) that * should respond to arrow keys to navigate among the peers, but keep only * one of the peers tabbable by default (tabIndex=0) * * Some examples: Toolbars, Radio groups, Tab bars, Panel headers, Menus */ Ext.define('Ext.util.FocusableContainer', { extend: 'Ext.Mixin', requires: [ 'Ext.util.KeyNav' ], mixinConfig: { id: 'focusablecontainer', before: { onAdd: 'onFocusableChildAdd', onRemove: 'onFocusableChildRemove', destroy: 'destroyFocusableContainer', onFocusEnter: 'onFocusEnter' }, after: { afterRender: 'initFocusableContainer', onFocusLeave: 'onFocusLeave', afterShow: 'activateFocusableContainerEl' } }, isFocusableContainer: true, /** * @cfg {Boolean} [enableFocusableContainer=true] Enable or disable * navigation with arrow keys for this FocusableContainer. This option may * be useful with nested FocusableContainers such as Grid column headers, * when only the root container should handle keyboard events. */ enableFocusableContainer: true, /** * @cfg {Number} [activeChildTabIndex=0] DOM tabIndex attribute to set on the * active Focusable child of this container when using the "Roaming tabindex" * technique. Set this value to > 0 to precisely control the tabbing order * of the components/containers on the page. */ activeChildTabIndex: 0, /** * @cfg {Number} [inactiveChildTabIndex=-1] DOM tabIndex attribute to set on * inactive Focusable children of this container when using the "Roaming tabindex" * technique. This value rarely needs to be changed from its default. */ inactiveChildTabIndex: -1, privates: { initFocusableContainer: function(clearChildren) { var items, i, len; // Allow nested containers to optionally disable // children containers' behavior if (this.enableFocusableContainer) { clearChildren = clearChildren != null ? 
clearChildren : true; this.doInitFocusableContainer(clearChildren); } // A FocusableContainer instance such as a toolbar could have decided // to opt out of FC behavior for some reason; it could have happened // after all or almost all child items have been initialized with // focusableContainer reference. We need to clean this up if we're not // going to behave like a FocusableContainer after all. else { items = this.getFocusables(); for (i = 0, len = items.length; i < len; i++) { items[i].focusableContainer = null; } } }, doInitFocusableContainer: function(clearChildren) { var me = this, el, child; el = me.getFocusableContainerEl(); // This flag allows post factum initialization of the focusable container, // i.e. when container was empty initially and then some tabbable children // were added and we need to clear their tabIndices after priming our own // element's tabIndex. // This is useful for Panel and Window headers that might have tools // added dynamically. if (clearChildren) { me.clearFocusables(); } // If we have no potentially focusable children, or all potentially focusable // children are presently disabled, don't init the container el tabIndex. // There is no point in tabbing into container when it can't shift focus // to a child. child = me.findNextFocusableChild({ step: 1, beforeRender: true }); if (child) { // We set tabIndex on the focusable container el so that the user // could tab into it; we catch its focus event and focus a child instead me.activateFocusableContainerEl(el); } // Unsightly long names help to avoid possible clashing with class // or instance properties. We have to be extra careful in a mixin! 
me.focusableContainerMouseListener = me.mon( el, 'mousedown', me.onFocusableContainerMousedown, me ); // Having keyNav doesn't hurt when container el is not focusable me.focusableKeyNav = me.createFocusableContainerKeyNav(el); }, createFocusableContainerKeyNav: function(el) { var me = this; return new Ext.util.KeyNav(el, { eventName: 'keydown', ignoreInputFields: true, scope: me, tab: me.onFocusableContainerTabKey, enter: me.onFocusableContainerEnterKey, space: me.onFocusableContainerSpaceKey, up: me.onFocusableContainerUpKey, down: me.onFocusableContainerDownKey, left: me.onFocusableContainerLeftKey, right: me.onFocusableContainerRightKey }); }, destroyFocusableContainer: function() { if (this.enableFocusableContainer) { this.doDestroyFocusableContainer(); } }, doDestroyFocusableContainer: function() { var me = this; if (me.keyNav) { me.keyNav.destroy(); } if (me.focusableContainerMouseListener) { me.focusableContainerMouseListener.destroy(); } me.focusableKeyNav = me.focusableContainerMouseListener = null; }, // Default FocusableContainer implies a flat list of focusable children getFocusables: function() { return this.items.items; }, initDefaultFocusable: function(beforeRender) { var me = this, activeIndex = me.activeChildTabIndex, haveFocusable = false, items, item, i, len, tabIdx; items = me.getFocusables(); len = items.length; if (!len) { return; } // Check if any child Focusable is already active. // Note that we're not determining *which* focusable child // to focus here, only that we have some focusables. for (i = 0; i < len; i++) { item = items[i]; if (item.focusable && !item.disabled) { haveFocusable = true; tabIdx = item.getTabIndex(); if (tabIdx != null && tabIdx >= activeIndex) { return item; } } } // No interactive children found, no point in going further if (!haveFocusable) { return; } // No child is focusable by default, so the first *interactive* // one gets initial childTabIndex. 
We are not looking for a focusable // child here because it may not be focusable yet if this happens // before rendering; we assume that an interactive child will become // focusable later and now activateFocusable() will just assign it // the respective tabIndex. item = me.findNextFocusableChild({ beforeRender: beforeRender, items: items, step: true }); if (item) { me.activateFocusable(item); } return item; }, clearFocusables: function() { var me = this, items = me.getFocusables(), len = items.length, item, i; for (i = 0; i < len; i++) { item = items[i]; if (item.focusable && !item.disabled) { me.deactivateFocusable(item); } } }, activateFocusable: function(child, /* optional */ newTabIndex) { var activeIndex = newTabIndex != null ? newTabIndex : this.activeChildTabIndex; child.setTabIndex(activeIndex); }, deactivateFocusable: function(child, /* optional */ newTabIndex) { var inactiveIndex = newTabIndex != null ? newTabIndex : this.inactiveChildTabIndex; child.setTabIndex(inactiveIndex); }, onFocusableContainerTabKey: function() { return true; }, onFocusableContainerEnterKey: function() { return true; }, onFocusableContainerSpaceKey: function() { return true; }, onFocusableContainerUpKey: function(e) { // Default action is to scroll the nearest vertically scrollable container e.preventDefault(); return this.moveChildFocus(e, false); }, onFocusableContainerDownKey: function(e) { // Ditto e.preventDefault(); return this.moveChildFocus(e, true); }, onFocusableContainerLeftKey: function(e) { // Default action is to scroll the nearest horizontally scrollable container e.preventDefault(); return this.moveChildFocus(e, false); }, onFocusableContainerRightKey: function(e) { // Ditto e.preventDefault(); return this.moveChildFocus(e, true); }, getFocusableFromEvent: function(e) { var child = Ext.Component.fromElement(e.getTarget()); //<debug> if (!child) { Ext.raise("No focusable child found for keyboard event!"); } //</debug> return child; }, moveChildFocus: function(e, 
forward) { var child = this.getFocusableFromEvent(e); return this.focusChild(child, forward, e); }, focusChild: function(child, forward) { var nextChild = this.findNextFocusableChild({ child: child, step: forward }); if (nextChild) { nextChild.focus(); } return nextChild; }, findNextFocusableChild: function(options) { // This method is private, so options should always be provided var beforeRender = options.beforeRender, items, item, child, step, idx, i, len; items = options.items || this.getFocusables(); step = options.step != null ? options.step : 1; child = options.child; // If the child is null or undefined, idx will be -1. // The loop below will account for that, trying to find // the first focusable child from either end (depending on step) idx = Ext.Array.indexOf(items, child); // It's often easier to pass a boolean for 1/-1 step = step === true ? 1 : step === false ? -1 : step; len = items.length; i = step > 0 ? (idx < len ? idx + step : 0) : (idx > 0 ? idx + step : len - 1); for (;; i += step) { // We're looking for the first or last focusable child // and we've reached the end of the items, so punt if (idx < 0 && (i >= len || i < 0)) { return null; } // Loop over forward else if (i >= len) { i = -1; // Iterator will increase it once more continue; } // Loop over backward else if (i < 0) { i = len; continue; } // Looped to the same item, give up else if (i === idx) { return null; } item = items[i]; if (!item || !item.focusable || item.disabled) { continue; } // This loop can be run either at FocusableContainer init time, // or later when we need to navigate upon pressing an arrow key. // When we're navigating, we have to know exactly if the child is // focusable or not, hence only rendered children will make the cut. 
// At the init time item.isFocusable() may return false incorrectly // just because the item has not been rendered yet and its focusEl // is not defined, so we don't bother to call isFocusable and return // the first potentially focusable child. if (beforeRender || (item.isFocusable && item.isFocusable())) { return item; } } return null; }, getFocusableContainerEl: function() { return this.el; }, onFocusableChildAdd: function(child) { if (this.enableFocusableContainer) { return this.doFocusableChildAdd(child); } }, activateFocusableContainerEl: function(el) { el = el || this.getFocusableContainerEl(); // Might not yet be rendered if (el) { el.set({ tabIndex: this.activeChildTabIndex }); } }, deactivateFocusableContainerEl: function(el) { el = el || this.getFocusableContainerEl(); if (el) { el.set({ tabIndex: undefined }); } }, isFocusableContainerActive: function() { var me = this, isActive = false, el, child, focusEl; el = me.getFocusableContainerEl(); if (el && el.isTabbable && el.isTabbable()) { isActive = true; } else { child = me.lastFocusedChild; focusEl = child && child.getFocusEl && child.getFocusEl(); if (focusEl && focusEl.isTabbable && focusEl.isTabbable()) { isActive = true; } } return isActive; }, doFocusableChildAdd: function(child) { if (child.focusable) { child.focusableContainer = this; } }, onFocusableChildRemove: function(child) { if (this.enableFocusableContainer) { return this.doFocusableChildRemove(child); } child.focusableContainer = null; }, doFocusableChildRemove: function(child) { // If the focused child is being removed, we deactivate the FocusableContainer // So that it returns to the tabbing order. 
// For example, locking a grid column must return the owning HeaderContainer // to tabbability if (child === this.lastFocusedChild) { this.lastFocusedChild = null; this.activateFocusableContainerEl(); } }, onFocusableContainerMousedown: function(e, target) { var targetCmp = Ext.Component.fromElement(target); // Capture the timestamp for the mousedown. If we're navigating // into the container itself via the mouse we don't want to // default focus the first child like we would when using the keyboard. // By the time we get to the focusenter handling, we don't know what has caused // the focus to be triggered, so if the timestamp falls within some small epsilon, // the focus enter has been caused via the mouse and we can react accordingly. this.mousedownTimestamp = targetCmp === this ? Ext.Date.now() : 0; // Prevent focusing the container itself. DO NOT remove this clause, it is // untestable by our unit tests: injecting mousedown events will not cause // default action in the browser, the element never gets focus and tests // never fail. 
See http://www.w3.org/TR/DOM-Level-3-Events/#trusted-events if (targetCmp === this) { e.preventDefault(); } }, onFocusEnter: function(e) { var me = this, target = e.toComponent, mousedownTimestamp = me.mousedownTimestamp, epsilon = 50, child; if (!me.enableFocusableContainer) { return null; } me.mousedownTimestamp = 0; if (target === me) { if (!mousedownTimestamp || Ext.Date.now() - mousedownTimestamp > epsilon) { child = me.initDefaultFocusable(); if (child) { me.deactivateFocusableContainerEl(); child.focus(); } } } else { me.deactivateFocusableContainerEl(); } return target; }, onFocusLeave: function(e) { var me = this, lastFocused = me.lastFocusedChild; if (!me.enableFocusableContainer) { return; } if (!me.destroyed && !me.destroying) { me.clearFocusables(); if (lastFocused && !lastFocused.disabled) { me.activateFocusable(lastFocused); } else { me.activateFocusableContainerEl(); } } }, beforeFocusableChildBlur: Ext.privateFn, afterFocusableChildBlur: Ext.privateFn, beforeFocusableChildFocus: function(child) { var me = this; if (!me.enableFocusableContainer) { return; } me.clearFocusables(); me.activateFocusable(child); if (child.needArrowKeys) { me.guardFocusableChild(child); } }, guardFocusableChild: function(child) { var me = this, index = me.activeChildTabIndex, guard; guard = me.findNextFocusableChild({ child: child, step: -1 }); if (guard) { guard.setTabIndex(index); } guard = me.findNextFocusableChild({ child: child, step: 1 }); if (guard) { guard.setTabIndex(index); } }, afterFocusableChildFocus: function(child) { if (!this.enableFocusableContainer) { return; } this.lastFocusedChild = child; }, beforeFocusableChildEnable: Ext.privateFn, onFocusableChildEnable: function(child) { var me = this; if (!me.enableFocusableContainer) { return; } // Some Components like Buttons do not render tabIndex attribute // when they start their lifecycle disabled, or remove tabIndex // if they get disabled later. 
Subsequently, such Components will // reset their tabIndex to default configured value upon enabling. // We don't want these children to be tabbable so we reset their // tabIndex yet again, unless this child is the last focused one. if (child !== me.lastFocusedChild) { me.deactivateFocusable(child); if (!me.isFocusableContainerActive()) { me.activateFocusableContainerEl(); } } }, beforeFocusableChildDisable: function(child) { var me = this, nextTarget; if (!me.enableFocusableContainer || me.destroying || me.destroyed) { return; } // When currently focused child is about to be disabled, // it may lose the focus as well. For example, Buttons // will remove tabIndex attribute upon disabling, which // in turn will throw focus to the document body and cause // onFocusLeave to fire on the FocusableContainer. // We're focusing the next sibling to prevent that. if (child.hasFocus) { nextTarget = me.findNextFocusableChild({ child: child }) || child.findFocusTarget(); // Note that it is entirely possible not to find the nextTarget, // e.g. when we're disabling the last button in a toolbar rendered // directly into document body. We don't have a good way to handle // such cases at present. if (nextTarget) { nextTarget.focus(); } } }, onFocusableChildDisable: function(child) { var me = this, lastFocused = me.lastFocusedChild, firstFocusableChild; if (!me.enableFocusableContainer || me.destroying || me.destroyed) { return; } // If the disabled child was the last focused item of this // FocusableContainer, we have to reset the tabbability of // our container element. if (child === lastFocused) { me.activateFocusableContainerEl(); } // It is also possible that the disabled child was the last // focusable child of this container, in which case we need // to make the container untabbable. 
firstFocusableChild = me.findNextFocusableChild({ step: 1 }); if (!firstFocusableChild) { me.deactivateFocusableContainerEl(); } }, // TODO onFocusableChildShow: Ext.privateFn, onFocusableChildHide: Ext.privateFn, onFocusableChildMasked: Ext.privateFn, onFocusableChildDestroy: Ext.privateFn, onFocusableChildUpdate: Ext.privateFn } });
{ "content_hash": "cf2519546919d76d23bb428becec7f4d", "timestamp": "", "source": "github", "line_count": 654, "max_line_length": 95, "avg_line_length": 36.90214067278288, "alnum_prop": 0.4970995276373581, "repo_name": "sys0pp/c5", "id": "2d8f33b4ff71486fb6feb8007562ad87b358db82", "size": "24134", "binary": false, "copies": "9", "ref": "refs/heads/master", "path": "ext/classic/classic/src/util/FocusableContainer.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "5589885" }, { "name": "HTML", "bytes": "191218" }, { "name": "JavaScript", "bytes": "77475399" }, { "name": "Ruby", "bytes": "9559" } ] }
class Circle::ActivateMember < Mutations::Command required do integer :circle_id integer :user_id end def execute activate_user notify_user end private def activate_user user.active! end def notify_user UserMailer.account_activated(user).deliver_now end def circle Circle.find(circle_id) end def user circle.users.find(user_id) end end
{ "content_hash": "c3fa2f8aebe6ff79ba829eff725164a0", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 50, "avg_line_length": 13, "alnum_prop": 0.674937965260546, "repo_name": "phil-monroe/lale-help", "id": "6be32259d49a6b6ee600ffb52811b3fa102ee965", "size": "403", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/mutations/circle/activate_member.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "85154" }, { "name": "CoffeeScript", "bytes": "8299" }, { "name": "HTML", "bytes": "85553" }, { "name": "JavaScript", "bytes": "358" }, { "name": "Ruby", "bytes": "230743" }, { "name": "Shell", "bytes": "1090" } ] }
require File.dirname(__FILE__) + '/../../../spec_helper' describe Pkgr::Distributions::Base do let(:config) { Pkgr::Config.new } let(:distribution) { Pkgr::Distributions::Base.new("7.4", config) } describe "pre/post install" do it "has a preinstall file" do expect(distribution.preinstall_file).to_not be_nil end it "has a postinstall file" do expect(distribution.postinstall_file).to_not be_nil end end describe "initializers" do let(:runner) { double(:runner) } it "has no initializer if no process in procfile" do expect(distribution.initializers_for("my-app", [])).to be_empty end it "has one set of initializer per daemon process declared in the procfile" do processes = [ Pkgr::Process.new("web", "rails s"), Pkgr::Process.new("console", "rails c"), Pkgr::Process.new("worker", "sidekiq") ] distribution.stub(:runner => runner) runner.stub(templates: [double(:template1), double(:template2)]) templates_by_process = distribution.initializers_for("my-app", processes).group_by{|(a,b)| a} expect(templates_by_process.keys.map(&:name)).to eq(["web", "console", "worker"]) expect(templates_by_process[processes[0]].length).to eq(2) end end describe "#buildpacks" do it "can take an external list of default buildpacks" do config.buildpack_list = fixture("buildpack-list") type, list = distribution.buildpacks expect(list.length).to eq(2) expect(list.all?{|b| b.is_a?(Pkgr::Buildpack)}).to eq(true) expect(list.first.env.to_hash).to eq({ "VENDOR_URL"=>"https://path/to/vendor", "CURL_TIMEOUT"=>"123" }) end it "prioritize buildpack specific environment variables over the global ones" do config.env = Pkgr::Env.new(["VENDOR_URL=http://global/path"]) config.buildpack_list = fixture("buildpack-list") type, list = distribution.buildpacks expect(list.first.env.to_hash["VENDOR_URL"]).to eq("https://path/to/vendor") expect(list.last.env.to_hash["VENDOR_URL"]).to eq("http://global/path") end end end
{ "content_hash": "50b47f1926b20e4acf57ff5aaaae3fe8", "timestamp": "", "source": "github", "line_count": 58, "max_line_length": 99, "avg_line_length": 36.793103448275865, "alnum_prop": 0.6494845360824743, "repo_name": "monterail/pkgr", "id": "97d87b7657c84d2f296db580973d5da5f2108367", "size": "2134", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "spec/lib/pkgr/distributions/base_spec.rb", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "2263" }, { "name": "Ruby", "bytes": "101402" }, { "name": "Shell", "bytes": "29352" } ] }
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <parent> <groupId>com.alibaba.otter</groupId> <artifactId>canal</artifactId> <version>1.0.20-SNAPSHOT</version> <relativePath>../../pom.xml</relativePath> </parent> <groupId>com.alibaba.otter</groupId> <artifactId>canal.instance.core</artifactId> <packaging>jar</packaging> <name>canal instance core module for otter ${project.version}</name> <dependencies> <dependency> <groupId>com.alibaba.otter</groupId> <artifactId>canal.common</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>com.alibaba.otter</groupId> <artifactId>canal.store</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>com.alibaba.otter</groupId> <artifactId>canal.meta</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>com.alibaba.otter</groupId> <artifactId>canal.parse</artifactId> <version>${project.version}</version> </dependency> </dependencies> </project>
{ "content_hash": "147aa00486f67e735ef5f262e096718d", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 204, "avg_line_length": 35.714285714285715, "alnum_prop": 0.7184, "repo_name": "hanl754/canal", "id": "b4bb5428a04b24659848db1c4419db829bee1bc3", "size": "1250", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "instance/core/pom.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "1224809" }, { "name": "Protocol Buffer", "bytes": "7829" }, { "name": "Shell", "bytes": "10245" } ] }
import dateutil.relativedelta from django.utils import timezone import mock from nose.tools import * # flake8: noqa import pytest from urlparse import urlparse from api.base.settings.defaults import API_BASE from api_tests.nodes.views.test_node_draft_registration_list import DraftRegistrationTestCase from api_tests.registrations.filters.test_filters import RegistrationListFilteringMixin from framework.auth.core import Auth from osf.models import MetaSchema, DraftRegistration from osf_tests.factories import ( EmbargoFactory, ProjectFactory, RegistrationFactory, AuthUserFactory, CollectionFactory, DraftRegistrationFactory, ) from rest_framework import exceptions from tests.base import ApiTestCase from website.project.metadata.schemas import LATEST_SCHEMA_VERSION from website.views import find_bookmark_collection from osf.utils import permissions class TestRegistrationList(ApiTestCase): def setUp(self): super(TestRegistrationList, self).setUp() self.user = AuthUserFactory() self.project = ProjectFactory(is_public=False, creator=self.user) self.registration_project = RegistrationFactory( creator=self.user, project=self.project) self.url = '/{}registrations/'.format(API_BASE) self.public_project = ProjectFactory(is_public=True, creator=self.user) self.public_registration_project = RegistrationFactory( creator=self.user, project=self.public_project, is_public=True) self.user_two = AuthUserFactory() def test_return_public_registrations_logged_out(self): res = self.app.get(self.url) assert_equal(len(res.json['data']), 1) assert_equal(res.status_code, 200) assert_equal(res.status_code, 200) assert_equal(res.content_type, 'application/vnd.api+json') url = res.json['data'][0]['relationships']['registered_from']['links']['related']['href'] assert_equal( urlparse(url).path, '/{}nodes/{}/'.format(API_BASE, self.public_project._id) ) def test_return_registrations_logged_in_contributor(self): res = self.app.get(self.url, auth=self.user.auth) assert_equal(len(res.json['data']), 2) 
assert_equal(res.status_code, 200) registered_from_one = urlparse( res.json['data'][0]['relationships']['registered_from']['links']['related']['href']).path registered_from_two = urlparse( res.json['data'][1]['relationships']['registered_from']['links']['related']['href']).path assert_equal(res.content_type, 'application/vnd.api+json') assert_items_equal( [registered_from_one, registered_from_two], ['/{}nodes/{}/'.format(API_BASE, self.public_project._id), '/{}nodes/{}/'.format(API_BASE, self.project._id)] ) def test_return_registrations_logged_in_non_contributor(self): res = self.app.get(self.url, auth=self.user_two.auth) assert_equal(len(res.json['data']), 1) assert_equal(res.status_code, 200) registered_from = urlparse( res.json['data'][0]['relationships']['registered_from']['links']['related']['href']).path assert_equal(res.content_type, 'application/vnd.api+json') assert_equal( registered_from, '/{}nodes/{}/'.format(API_BASE, self.public_project._id)) def test_total_biographic_contributor_in_registration(self): user3 = AuthUserFactory() registration = RegistrationFactory(is_public=True, creator=self.user) registration.add_contributor(self.user_two, auth=Auth(self.user)) registration.add_contributor( user3, auth=Auth(self.user), visible=False) registration.save() registration_url = '/{0}registrations/{1}/?embed=contributors'.format( API_BASE, registration._id) res = self.app.get(registration_url) assert_true( res.json['data']['embeds']['contributors']['links']['meta']['total_bibliographic'] ) assert_equal( res.json['data']['embeds']['contributors']['links']['meta']['total_bibliographic'], 2) def test_exclude_nodes_from_registrations_endpoint(self): res = self.app.get(self.url, auth=self.user.auth) ids = [each['id'] for each in res.json['data']] assert_in(self.registration_project._id, ids) assert_in(self.public_registration_project._id, ids) assert_not_in(self.public_project._id, ids) assert_not_in(self.project._id, ids) class 
TestRegistrationFiltering(ApiTestCase): def setUp(self): super(TestRegistrationFiltering, self).setUp() self.user_one = AuthUserFactory() self.user_two = AuthUserFactory() self.project_one = ProjectFactory( title="Project One", description='Two', is_public=True, creator=self.user_one, category='hypothesis') self.project_two = ProjectFactory( title="Project Two", description="One Three", is_public=True, creator=self.user_one) self.project_three = ProjectFactory( title="Three", is_public=True, creator=self.user_two) self.private_project_user_one = ProjectFactory( title="Private Project User One", is_public=False, creator=self.user_one) self.private_project_user_two = ProjectFactory( title="Private Project User Two", is_public=False, creator=self.user_two) self.project_one.add_tag('tag1', Auth( self.project_one.creator), save=False) self.project_one.add_tag('tag2', Auth( self.project_one.creator), save=False) self.project_one.save() self.project_two.add_tag('tag1', Auth( self.project_two.creator), save=True) self.project_two.save() self.project_one_reg = RegistrationFactory( creator=self.user_one, project=self.project_one, is_public=True) self.project_two_reg = RegistrationFactory( creator=self.user_one, project=self.project_two, is_public=True) self.project_three_reg = RegistrationFactory( creator=self.user_two, project=self.project_three, is_public=True) self.private_project_user_one_reg = RegistrationFactory( creator=self.user_one, project=self.private_project_user_one, is_public=False) self.private_project_user_two_reg = RegistrationFactory( creator=self.user_two, project=self.private_project_user_two, is_public=False) self.folder = CollectionFactory() self.bookmark_collection = find_bookmark_collection(self.user_one) self.url = "/{}registrations/".format(API_BASE) def test_filtering_by_category(self): url = '/{}registrations/?filter[category]=hypothesis'.format(API_BASE) res = self.app.get(url, auth=self.user_one.auth) registration_json = res.json['data'] ids = 
[each['id'] for each in registration_json] assert_in(self.project_one_reg._id, ids) assert_not_in(self.project_two_reg._id, ids) assert_not_in(self.project_three_reg._id, ids) assert_not_in(self.private_project_user_one_reg._id, ids) assert_not_in(self.private_project_user_two_reg._id, ids) def test_filtering_by_public(self): url = '/{}registrations/?filter[public]=false'.format(API_BASE) res = self.app.get(url, auth=self.user_one.auth) reg_json = res.json['data'] # No public projects returned assert_false( any([each['attributes']['public'] for each in reg_json]) ) ids = [each['id'] for each in reg_json] assert_not_in(self.project_one_reg._id, ids) assert_not_in(self.project_two_reg._id, ids) url = '/{}registrations/?filter[public]=true'.format(API_BASE) res = self.app.get(url, auth=self.user_one.auth) reg_json = res.json['data'] # No private projects returned assert_true( all([each['attributes']['public'] for each in reg_json]) ) ids = [each['id'] for each in reg_json] assert_in(self.project_one_reg._id, ids) assert_in(self.project_two_reg._id, ids) assert_in(self.project_three_reg._id, ids) assert_not_in(self.private_project_user_one_reg._id, ids) assert_not_in(self.private_project_user_two_reg._id, ids) def test_filtering_tags(self): # both project_one and project_two have tag1 url = '/{}registrations/?filter[tags]={}'.format(API_BASE, 'tag1') res = self.app.get(url, auth=self.project_one.creator.auth) reg_json = res.json['data'] ids = [each['id'] for each in reg_json] assert_in(self.project_one_reg._id, ids) assert_in(self.project_two_reg._id, ids) assert_not_in(self.project_three_reg._id, ids) assert_not_in(self.private_project_user_one_reg._id, ids) assert_not_in(self.private_project_user_two_reg._id, ids) # filtering two tags # project_one has both tags; project_two only has one url = '/{}registrations/?filter[tags]={}&filter[tags]={}'.format( API_BASE, 'tag1', 'tag2') res = self.app.get(url, auth=self.project_one.creator.auth) reg_json = res.json['data'] 
ids = [each['id'] for each in reg_json] assert_in(self.project_one_reg._id, ids) assert_not_in(self.project_two_reg._id, ids) assert_not_in(self.project_three_reg._id, ids) assert_not_in(self.private_project_user_one_reg._id, ids) assert_not_in(self.private_project_user_two_reg._id, ids) def test_filtering_tags_exact(self): self.project_one.add_tag('cats', Auth(self.user_one)) self.project_two.add_tag('cats', Auth(self.user_one)) self.project_one.add_tag('cat', Auth(self.user_one)) self.project_one_reg = RegistrationFactory( creator=self.user_one, project=self.project_one, is_public=True) self.project_two_reg = RegistrationFactory( creator=self.user_one, project=self.project_two, is_public=True) res = self.app.get( '/{}registrations/?filter[tags]=cat'.format( API_BASE ), auth=self.user_one.auth ) assert_equal(len(res.json.get('data')), 1) def test_filtering_tags_capitalized_query(self): self.project_one.add_tag('cat', Auth(self.user_one)) self.project_one_reg = RegistrationFactory( creator=self.user_one, project=self.project_one, is_public=True) res = self.app.get( '/{}registrations/?filter[tags]=CAT'.format( API_BASE ), auth=self.user_one.auth ) assert_equal(len(res.json.get('data')), 1) def test_filtering_tags_capitalized_tag(self): self.project_one.add_tag('CAT', Auth(self.user_one)) self.project_one_reg = RegistrationFactory( creator=self.user_one, project=self.project_one, is_public=True) res = self.app.get( '/{}registrations/?filter[tags]=cat'.format( API_BASE ), auth=self.user_one.auth ) assert_equal(len(res.json.get('data')), 1) def test_filtering_on_multiple_tags(self): self.project_one.add_tag('cat', Auth(self.user_one)) self.project_one.add_tag('sand', Auth(self.user_one)) self.project_one_reg = RegistrationFactory( creator=self.user_one, project=self.project_one, is_public=True) res = self.app.get( '/{}registrations/?filter[tags]=cat&filter[tags]=sand'.format( API_BASE ), auth=self.user_one.auth ) assert_equal(len(res.json.get('data')), 1) def 
test_filtering_on_multiple_tags_must_match_both(self): self.project_one.add_tag('cat', Auth(self.user_one)) self.project_one_reg = RegistrationFactory( creator=self.user_one, project=self.project_one, is_public=True) res = self.app.get( '/{}registrations/?filter[tags]=cat&filter[tags]=sand'.format( API_BASE ), auth=self.user_one.auth ) assert_equal(len(res.json.get('data')), 0) def test_filtering_tags_returns_distinct(self): # regression test for returning multiple of the same file self.project_one.add_tag('cat', Auth(self.user_one)) self.project_one.add_tag('cAt', Auth(self.user_one)) self.project_one.add_tag('caT', Auth(self.user_one)) self.project_one.add_tag('CAT', Auth(self.user_one)) self.project_one_reg = RegistrationFactory( creator=self.user_one, project=self.project_one, is_public=True) res = self.app.get( '/{}registrations/?filter[tags]=cat'.format( API_BASE ), auth=self.user_one.auth ) assert_equal(len(res.json.get('data')), 1) def test_filtering_contributors(self): res = self.app.get( '/{}registrations/?filter[contributors]={}'.format( API_BASE, self.user_one._id ), auth=self.user_one.auth ) assert_equal(len(res.json.get('data')), 3) def test_filtering_contributors_bad_id(self): res = self.app.get( '/{}registrations/?filter[contributors]=acatdresseduplikeahuman'.format( API_BASE ), auth=self.user_one.auth ) assert_equal(len(res.json.get('data')), 0) def test_get_all_registrations_with_no_filter_logged_in(self): res = self.app.get(self.url, auth=self.user_one.auth) reg_json = res.json['data'] ids = [each['id'] for each in reg_json] assert_in(self.project_one_reg._id, ids) assert_in(self.project_two_reg._id, ids) assert_in(self.project_three_reg._id, ids) assert_in(self.private_project_user_one_reg._id, ids) assert_not_in(self.private_project_user_two_reg._id, ids) assert_not_in(self.project_one._id, ids) assert_not_in(self.project_two._id, ids) assert_not_in(self.project_three._id, ids) assert_not_in(self.private_project_user_one._id, ids) 
assert_not_in(self.private_project_user_two._id, ids) assert_not_in(self.folder._id, ids) assert_not_in(self.bookmark_collection._id, ids) def test_get_all_registrations_with_no_filter_not_logged_in(self): res = self.app.get(self.url) reg_json = res.json['data'] ids = [each['id'] for each in reg_json] assert_in(self.project_one_reg._id, ids) assert_in(self.project_two_reg._id, ids) assert_in(self.project_three_reg._id, ids) assert_not_in(self.private_project_user_one_reg._id, ids) assert_not_in(self.private_project_user_two_reg._id, ids) assert_not_in(self.project_one._id, ids) assert_not_in(self.project_two._id, ids) assert_not_in(self.project_three._id, ids) assert_not_in(self.private_project_user_one._id, ids) assert_not_in(self.private_project_user_two._id, ids) assert_not_in(self.folder._id, ids) assert_not_in(self.bookmark_collection._id, ids) def test_get_one_registration_with_exact_filter_logged_in(self): url = "/{}registrations/?filter[title]=Project%20One".format(API_BASE) res = self.app.get(url, auth=self.user_one.auth) reg_json = res.json['data'] ids = [each['id'] for each in reg_json] assert_in(self.project_one_reg._id, ids) assert_not_in(self.project_two_reg._id, ids) assert_not_in(self.project_three_reg._id, ids) assert_not_in(self.private_project_user_one_reg._id, ids) assert_not_in(self.private_project_user_two_reg._id, ids) assert_not_in(self.folder._id, ids) assert_not_in(self.bookmark_collection._id, ids) def test_get_one_registration_with_exact_filter_not_logged_in(self): url = "/{}registrations/?filter[title]=Private%20Project%20User%20One".format( API_BASE) res = self.app.get(url) reg_json = res.json['data'] ids = [each['id'] for each in reg_json] assert_not_in(self.project_one_reg._id, ids) assert_not_in(self.project_two_reg._id, ids) assert_not_in(self.project_three_reg._id, ids) assert_not_in(self.private_project_user_one_reg._id, ids) assert_not_in(self.private_project_user_two_reg._id, ids) assert_not_in(self.folder._id, ids) 
assert_not_in(self.bookmark_collection._id, ids) def test_get_some_registrations_with_substring_logged_in(self): url = "/{}registrations/?filter[title]=Two".format(API_BASE) res = self.app.get(url, auth=self.user_one.auth) reg_json = res.json['data'] ids = [each['id'] for each in reg_json] assert_not_in(self.project_one_reg._id, ids) assert_in(self.project_two_reg._id, ids) assert_not_in(self.project_three_reg._id, ids) assert_not_in(self.private_project_user_one_reg._id, ids) assert_not_in(self.private_project_user_two_reg._id, ids) assert_not_in(self.folder._id, ids) assert_not_in(self.bookmark_collection._id, ids) def test_get_some_registrations_with_substring_not_logged_in(self): url = "/{}registrations/?filter[title]=One".format(API_BASE) res = self.app.get(url) reg_json = res.json['data'] ids = [each['id'] for each in reg_json] assert_in(self.project_one_reg._id, ids) assert_not_in(self.project_two_reg._id, ids) assert_not_in(self.project_three_reg._id, ids) assert_not_in(self.private_project_user_one_reg._id, ids) assert_not_in(self.private_project_user_two_reg._id, ids) assert_not_in(self.folder._id, ids) assert_not_in(self.bookmark_collection._id, ids) def test_get_only_public_or_my_registrations_with_filter_logged_in(self): url = "/{}registrations/?filter[title]=Project".format(API_BASE) res = self.app.get(url, auth=self.user_one.auth) reg_json = res.json['data'] ids = [each['id'] for each in reg_json] assert_in(self.project_one_reg._id, ids) assert_in(self.project_two_reg._id, ids) assert_not_in(self.project_three_reg._id, ids) assert_in(self.private_project_user_one_reg._id, ids) assert_not_in(self.private_project_user_two_reg._id, ids) assert_not_in(self.folder._id, ids) assert_not_in(self.bookmark_collection._id, ids) def test_get_only_public_registrations_with_filter_not_logged_in(self): url = "/{}registrations/?filter[title]=Project".format(API_BASE) res = self.app.get(url) reg_json = res.json['data'] ids = [each['id'] for each in reg_json] 
assert_in(self.project_one_reg._id, ids) assert_in(self.project_two_reg._id, ids) assert_not_in(self.project_three_reg._id, ids) assert_not_in(self.private_project_user_one_reg._id, ids) assert_not_in(self.private_project_user_two_reg._id, ids) assert_not_in(self.folder._id, ids) assert_not_in(self.bookmark_collection._id, ids) def test_alternate_filtering_field_logged_in(self): url = "/{}registrations/?filter[description]=Three".format(API_BASE) res = self.app.get(url, auth=self.user_one.auth) reg_json = res.json['data'] ids = [each['id'] for each in reg_json] assert_not_in(self.project_one_reg._id, ids) assert_in(self.project_two_reg._id, ids) assert_not_in(self.project_three_reg._id, ids) assert_not_in(self.private_project_user_one_reg._id, ids) assert_not_in(self.private_project_user_two_reg._id, ids) assert_not_in(self.folder._id, ids) assert_not_in(self.bookmark_collection._id, ids) def test_alternate_filtering_field_not_logged_in(self): url = "/{}registrations/?filter[description]=Two".format(API_BASE) res = self.app.get(url) reg_json = res.json['data'] ids = [each['id'] for each in reg_json] assert_in(self.project_one_reg._id, ids) assert_not_in(self.project_two_reg._id, ids) assert_not_in(self.project_three_reg._id, ids) assert_not_in(self.private_project_user_one_reg._id, ids) assert_not_in(self.private_project_user_two_reg._id, ids) assert_not_in(self.folder._id, ids) assert_not_in(self.bookmark_collection._id, ids) def test_incorrect_filtering_field_not_logged_in(self): url = '/{}registrations/?filter[notafield]=bogus'.format(API_BASE) res = self.app.get(url, expect_errors=True) assert_equal(res.status_code, 400) errors = res.json['errors'] assert_equal(len(errors), 1) assert_equal( errors[0]['detail'], "'notafield' is not a valid field for this endpoint.") class TestRegistrationCreate(DraftRegistrationTestCase): @pytest.fixture() def schema(self): return MetaSchema.objects.get( name='Replication Recipe (Brandt et al., 2013): Post-Completion', 
schema_version=LATEST_SCHEMA_VERSION) @pytest.fixture() def draft_registration(self, user, project_public, schema): return DraftRegistrationFactory( initiator=user, registration_schema=schema, branched_from=project_public, registration_metadata={ 'item29': {'value': 'Yes'}, 'item33': {'value': 'success'} } ) @pytest.fixture() def url_registrations(self, project_public): return '/{}nodes/{}/registrations/'.format( API_BASE, project_public._id) @pytest.fixture() def payload(self, draft_registration): return { 'data': { 'type': 'registrations', 'attributes': { 'draft_registration': draft_registration._id, 'registration_choice': 'immediate' } } } @mock.patch('framework.celery_tasks.handlers.enqueue_task') def test_admin_can_create_registration( self, mock_enqueue, app, user, payload, url_registrations): res = app.post_json_api(url_registrations, payload, auth=user.auth) data = res.json['data']['attributes'] assert res.status_code == 201 assert data['registration'] is True assert data['pending_registration_approval'] is True assert data['public'] is False def test_cannot_create_registration( self, app, user_write_contrib, user_read_contrib, payload, url_registrations): # def test_write_only_contributor_cannot_create_registration(self): res = app.post_json_api( url_registrations, payload, auth=user_write_contrib.auth, expect_errors=True) assert res.status_code == 403 # def test_read_only_contributor_cannot_create_registration(self): res = app.post_json_api( url_registrations, payload, auth=user_read_contrib.auth, expect_errors=True) assert res.status_code == 403 # def test_non_authenticated_user_cannot_create_registration(self): res = app.post_json_api(url_registrations, payload, expect_errors=True) assert res.status_code == 401 @mock.patch('framework.celery_tasks.handlers.enqueue_task') def test_registration_draft_must_be_specified( self, mock_enqueue, app, user, url_registrations): payload = { 'data': { 'type': 'registrations', 'attributes': { 'registration_choice': 
'immediate' } } } res = app.post_json_api( url_registrations, payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 assert res.json['errors'][0]['source']['pointer'] == '/data/attributes/draft_registration' assert res.json['errors'][0]['detail'] == 'This field is required.' @mock.patch('framework.celery_tasks.handlers.enqueue_task') def test_registration_draft_must_be_valid( self, mock_enqueue, app, user, url_registrations): payload = { 'data': { 'type': 'registrations', 'attributes': { 'registration_choice': 'immediate', 'draft_registration': '12345' } } } res = app.post_json_api( url_registrations, payload, auth=user.auth, expect_errors=True) assert res.status_code == 404 @mock.patch('framework.celery_tasks.handlers.enqueue_task') def test_registration_draft_must_be_draft_of_current_node( self, mock_enqueue, app, user, schema, url_registrations): project_new = ProjectFactory(creator=user) draft_registration = DraftRegistrationFactory( initiator=user, registration_schema=schema, branched_from=project_new, registration_metadata={ 'item29': {'value': 'Yes'}, 'item33': {'value': 'success'} } ) payload = { 'data': { 'type': 'registrations', 'attributes': { 'registration_choice': 'immediate', 'draft_registration': draft_registration._id } } } res = app.post_json_api( url_registrations, payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'This draft registration is not created from the given node.' 
@mock.patch('framework.celery_tasks.handlers.enqueue_task') def test_required_top_level_questions_must_be_answered_on_draft( self, mock_enqueue, app, user, project_public, prereg_metadata, url_registrations): prereg_schema = MetaSchema.objects.get( name='Prereg Challenge', schema_version=LATEST_SCHEMA_VERSION) prereg_draft_registration = DraftRegistrationFactory( initiator=user, registration_schema=prereg_schema, branched_from=project_public ) registration_metadata = prereg_metadata(prereg_draft_registration) del registration_metadata['q1'] prereg_draft_registration.registration_metadata = registration_metadata prereg_draft_registration.save() payload = { 'data': { 'type': 'registrations', 'attributes': { 'registration_choice': 'immediate', 'draft_registration': prereg_draft_registration._id, } } } res = app.post_json_api( url_registrations, payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'u\'q1\' is a required property' @mock.patch('framework.celery_tasks.handlers.enqueue_task') def test_required_top_level_questions_must_be_answered_on_draft( self, mock_enqueue, app, user, project_public, prereg_metadata, url_registrations): prereg_schema = MetaSchema.objects.get( name='Prereg Challenge', schema_version=LATEST_SCHEMA_VERSION) prereg_draft_registration = DraftRegistrationFactory( initiator=user, registration_schema=prereg_schema, branched_from=project_public ) registration_metadata = prereg_metadata(prereg_draft_registration) del registration_metadata['q1'] prereg_draft_registration.registration_metadata = registration_metadata prereg_draft_registration.save() payload = { 'data': { 'type': 'registrations', 'attributes': { 'registration_choice': 'immediate', 'draft_registration': prereg_draft_registration._id, } } } res = app.post_json_api( url_registrations, payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'u\'q1\' is a required 
property' @mock.patch('framework.celery_tasks.handlers.enqueue_task') def test_required_second_level_questions_must_be_answered_on_draft( self, mock_enqueue, app, user, project_public, prereg_metadata, url_registrations): prereg_schema = MetaSchema.objects.get( name='Prereg Challenge', schema_version=LATEST_SCHEMA_VERSION) prereg_draft_registration = DraftRegistrationFactory( initiator=user, registration_schema=prereg_schema, branched_from=project_public ) registration_metadata = prereg_metadata(prereg_draft_registration) registration_metadata['q11'] = {'value': {}} prereg_draft_registration.registration_metadata = registration_metadata prereg_draft_registration.save() payload = { 'data': { 'type': 'registrations', 'attributes': { 'registration_choice': 'immediate', 'draft_registration': prereg_draft_registration._id, } } } res = app.post_json_api( url_registrations, payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'u\'question\' is a required property' @mock.patch('framework.celery_tasks.handlers.enqueue_task') def test_required_third_level_questions_must_be_answered_on_draft( self, mock_enqueue, app, user, project_public, prereg_metadata, url_registrations): prereg_schema = MetaSchema.objects.get( name='Prereg Challenge', schema_version=LATEST_SCHEMA_VERSION) prereg_draft_registration = DraftRegistrationFactory( initiator=user, registration_schema=prereg_schema, branched_from=project_public ) registration_metadata = prereg_metadata(prereg_draft_registration) registration_metadata['q11'] = {'value': {"question": {}}} prereg_draft_registration.registration_metadata = registration_metadata prereg_draft_registration.save() payload = { 'data': { 'type': 'registrations', 'attributes': { 'registration_choice': 'immediate', 'draft_registration': prereg_draft_registration._id, } } } res = app.post_json_api( url_registrations, payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 
assert res.json['errors'][0]['detail'] == '\'value\' is a required property' @mock.patch('framework.celery_tasks.handlers.enqueue_task') def test_multiple_choice_in_registration_schema_must_match_one_of_choices( self, mock_enqueue, app, user, project_public, schema, payload, url_registrations): draft_registration = DraftRegistrationFactory( initiator=user, registration_schema=schema, branched_from=project_public, registration_metadata={ 'item29': {'value': 'Yes'}, 'item33': {'value': 'success!'} } ) payload['data']['attributes']['draft_registration'] = draft_registration._id res = app.post_json_api( url_registrations, payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 assert ( res.json['errors'][0]['detail'] == 'u\'success!\' is not one of [u\'success\', u\'informative failure to replicate\',' ' u\'practical failure to replicate\', u\'inconclusive\']') def test_invalid_registration_choice( self, app, user, draft_registration, url_registrations): payload = { 'data': { 'type': 'registrations', 'attributes': { 'draft_registration': draft_registration._id, 'registration_choice': 'tomorrow' } } } res = app.post_json_api( url_registrations, payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 assert res.json['errors'][0]['source']['pointer'] == '/data/attributes/registration_choice' assert res.json['errors'][0]['detail'] == '"tomorrow" is not a valid choice.' @mock.patch('framework.celery_tasks.handlers.enqueue_task') def test_embargo_end_date_provided_if_registration_choice_is_embargo( self, mock_enqueue, app, user, draft_registration, url_registrations): payload = { 'data': { 'type': 'registrations', 'attributes': { 'draft_registration': draft_registration._id, 'registration_choice': 'embargo' } } } res = app.post_json_api( url_registrations, payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'lift_embargo must be specified.' 
@mock.patch('framework.celery_tasks.handlers.enqueue_task') def test_embargo_must_be_less_than_four_years( self, mock_enqueue, app, user, draft_registration, url_registrations): today = timezone.now() five_years = ( today + dateutil.relativedelta.relativedelta( years=5)).strftime('%Y-%m-%dT%H:%M:%S') payload = { 'data': { 'type': 'registrations', 'attributes': { 'draft_registration': draft_registration._id, 'registration_choice': 'embargo', 'lift_embargo': five_years } } } res = app.post_json_api( url_registrations, payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'Registrations can only be embargoed for up to four years.' @mock.patch('framework.celery_tasks.handlers.enqueue_task') def test_embargo_registration( self, mock_enqueue, app, user, draft_registration, url_registrations): today = timezone.now() next_week = ( today + dateutil.relativedelta.relativedelta( months=1)).strftime('%Y-%m-%dT%H:%M:%S') payload = { 'data': { 'type': 'registrations', 'attributes': { 'draft_registration': draft_registration._id, 'registration_choice': 'embargo', 'lift_embargo': next_week } } } res = app.post_json_api( url_registrations, payload, auth=user.auth, expect_errors=True) assert res.status_code == 201 data = res.json['data']['attributes'] assert data['registration'] is True assert data['pending_embargo_approval'] is True def test_embargo_end_date_must_be_in_the_future( self, app, user, draft_registration, url_registrations): today = timezone.now().strftime('%Y-%m-%dT%H:%M:%S') payload = { 'data': { 'type': 'registrations', 'attributes': { 'draft_registration': draft_registration._id, 'registration_choice': 'embargo', 'lift_embargo': today } } } res = app.post_json_api( url_registrations, payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'Embargo end date must be at least three days in the future.' 
def test_invalid_embargo_end_date_format( self, app, user, draft_registration, url_registrations): today = timezone.now().isoformat() payload = { 'data': { 'type': 'registrations', 'attributes': { 'draft_registration': draft_registration._id, 'registration_choice': 'embargo', 'lift_embargo': today } } } res = app.post_json_api( url_registrations, payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'Datetime has wrong format. Use one of these formats instead: YYYY-MM-DDThh:mm:ss.' @mock.patch('framework.celery_tasks.handlers.enqueue_task') def test_cannot_register_draft_that_has_already_been_registered( self, mock_enqueue, app, user, payload, draft_registration, url_registrations): draft_registration.register(auth=Auth(user), save=True) res = app.post_json_api( url_registrations, payload, auth=user.auth, expect_errors=True) assert res.status_code == 403 assert res.json['errors'][0]['detail'] == 'This draft has already been registered and cannot be modified.' @mock.patch('framework.celery_tasks.handlers.enqueue_task') def test_cannot_register_draft_that_is_pending_review( self, mock_enqueue, app, user, payload, url_registrations): with mock.patch.object(DraftRegistration, 'is_pending_review', mock.PropertyMock(return_value=True)): res = app.post_json_api( url_registrations, payload, auth=user.auth, expect_errors=True) assert res.status_code == 403 assert res.json['errors'][0]['detail'] == 'This draft is pending review and cannot be modified.' 
def test_cannot_register_draft_that_has_already_been_approved( self, app, user, payload, url_registrations): with mock.patch.object(DraftRegistration, 'requires_approval', mock.PropertyMock(return_value=True)), mock.patch.object(DraftRegistration, 'is_approved', mock.PropertyMock(return_value=True)): res = app.post_json_api( url_registrations, payload, auth=user.auth, expect_errors=True) assert res.status_code == 403 assert res.json['errors'][0]['detail'] == 'This draft has already been approved and cannot be modified.' @pytest.mark.django_db class TestRegistrationBulkUpdate: @pytest.fixture() def url(self): return '/{}registrations/'.format(API_BASE) @pytest.fixture() def user(self): return AuthUserFactory() @pytest.fixture() def registration_one(self, user): return RegistrationFactory( creator=user, title='Birds', embargo=EmbargoFactory( user=user), is_public=False) @pytest.fixture() def registration_two(self, user): return RegistrationFactory( creator=user, title='Birds II', embargo=EmbargoFactory( user=user), is_public=False) @pytest.fixture() def private_payload(self, registration_one, registration_two): return { 'data': [ { 'id': registration_one._id, 'type': 'registrations', 'attributes': { 'public': False } }, { 'id': registration_two._id, 'type': 'registrations', 'attributes': { 'public': False } } ] } @pytest.fixture() def public_payload(self, registration_one, registration_two): return { 'data': [ { 'id': registration_one._id, 'type': 'registrations', 'attributes': { 'public': True } }, { 'id': registration_two._id, 'type': 'registrations', 'attributes': { 'public': True } } ] } @pytest.fixture() def empty_payload(self, registration_one, registration_two): return { 'data': [ { 'id': registration_one._id, 'type': 'registrations', 'attributes': {} }, { 'id': registration_two._id, 'type': 'registrations', 'attributes': {} } ] } @pytest.fixture() def bad_payload(self, registration_one, registration_two): return { 'data': [ { 'id': registration_one._id, 
'type': 'registrations', 'attributes': { 'public': True, } }, { 'id': registration_two._id, 'type': 'registrations', 'attributes': { 'title': 'Nerds II: Attack of the Nerds', } } ] } def test_bulk_update_errors( self, app, user, registration_one, registration_two, public_payload, private_payload, url): # test_bulk_update_registrations_blank_request res = app.put_json_api( url, auth=user.auth, expect_errors=True, bulk=True) assert res.status_code == 400 # test_bulk_update_registrations_one_not_found payload = {'data': [ { 'id': '12345', 'type': 'registrations', 'attributes': { 'public': True, } }, public_payload['data'][0] ]} res = app.put_json_api( url, payload, auth=user.auth, expect_errors=True, bulk=True) assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'Could not find all objects to update.' # test_bulk_update_registrations_logged_out res = app.put_json_api( url, public_payload, expect_errors=True, bulk=True) assert res.status_code == 401 assert res.json['errors'][0]['detail'] == exceptions.NotAuthenticated.default_detail # test_bulk_update_registrations_logged_in_non_contrib non_contrib = AuthUserFactory() res = app.put_json_api( url, private_payload, auth=non_contrib.auth, expect_errors=True, bulk=True) assert res.status_code == 403 assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail # test_bulk_update_registrations_send_dictionary_not_list res = app.put_json_api( url, { 'data': { 'id': registration_one._id, 'type': 'nodes', 'attributes': {'public': True} } }, auth=user.auth, expect_errors=True, bulk=True) assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'Expected a list of items but got type "dict".' 
# test_bulk_update_id_not_supplied res = app.put_json_api( url, { 'data': [ public_payload['data'][1], { 'type': 'registrations', 'attributes': {'public': True}} ] }, auth=user.auth, expect_errors=True, bulk=True) assert res.status_code == 400 assert len(res.json['errors']) == 1 assert res.json['errors'][0]['source']['pointer'] == '/data/1/id' assert res.json['errors'][0]['detail'] == "This field may not be null." # test_bulk_update_type_not_supplied res = app.put_json_api( url, { 'data': [ public_payload['data'][1], { 'id': registration_one._id, 'attributes': {'public': True} } ] }, auth=user.auth, expect_errors=True, bulk=True) assert res.status_code == 400 assert len(res.json['errors']) == 1 assert res.json['errors'][0]['source']['pointer'] == '/data/1/type' assert res.json['errors'][0]['detail'] == "This field may not be null." # test_bulk_update_incorrect_type res = app.put_json_api( url, { 'data': [ public_payload['data'][1], { 'id': registration_one._id, 'type': 'Incorrect', 'attributes': {'public': True} } ] }, auth=user.auth, expect_errors=True, bulk=True) assert res.status_code == 409 # test_bulk_update_limits registration_update_list = {'data': [public_payload['data'][0]] * 101} res = app.put_json_api( url, registration_update_list, auth=user.auth, expect_errors=True, bulk=True) assert res.json['errors'][0]['detail'] == 'Bulk operation limit is 100, got 101.' assert res.json['errors'][0]['source']['pointer'] == '/data' # 400 from attempting to make a registration private res = app.put_json_api( url, private_payload, auth=user.auth, bulk=True, expect_errors=True) assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'Registrations can only be turned from private to public.' 
# Confirm no changes have occured registration_one.refresh_from_db() registration_two.refresh_from_db() assert registration_one.embargo_termination_approval is None assert registration_two.embargo_termination_approval is None assert registration_one.is_public is False assert registration_two.is_public is False assert registration_one.title == 'Birds' assert registration_two.title == 'Birds II' def test_bulk_update_embargo_logged_in_read_only_contrib( self, app, user, registration_one, registration_two, public_payload, url): read_contrib = AuthUserFactory() registration_one.add_contributor( read_contrib, permissions=[ permissions.READ], save=True) registration_two.add_contributor( read_contrib, permissions=[ permissions.READ], save=True) res = app.put_json_api( url, public_payload, auth=read_contrib.auth, expect_errors=True, bulk=True) assert res.status_code == 403 assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail def test_bulk_update_embargo_logged_in_contrib( self, app, user, registration_one, registration_two, public_payload, url): assert registration_one.embargo_termination_approval is None assert registration_two.embargo_termination_approval is None res = app.put_json_api(url, public_payload, auth=user.auth, bulk=True) assert res.status_code == 200 assert ({registration_one._id, registration_two._id} == { res.json['data'][0]['id'], res.json['data'][1]['id']}) # Needs confirmation before it will become public assert res.json['data'][0]['attributes']['public'] is False assert res.json['data'][1]['attributes']['public'] is False registration_one.refresh_from_db() registration_two.refresh_from_db() # registrations should have pending terminations assert registration_one.embargo_termination_approval and registration_one.embargo_termination_approval.is_pending_approval assert registration_two.embargo_termination_approval and registration_two.embargo_termination_approval.is_pending_approval class TestRegistrationListFiltering( 
RegistrationListFilteringMixin, ApiTestCase): url = '/{}registrations/?'.format(API_BASE)
{ "content_hash": "f28c8b46f8d494b25dcfd9920c0a374a", "timestamp": "", "source": "github", "line_count": 1299, "max_line_length": 200, "avg_line_length": 38.56735950731332, "alnum_prop": 0.5705303499071838, "repo_name": "chennan47/osf.io", "id": "ea9ca519490efb1aff4f578bb3459c7b465b16d4", "size": "50099", "binary": false, "copies": "2", "ref": "refs/heads/develop", "path": "api_tests/registrations/views/test_registration_list.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "110839" }, { "name": "HTML", "bytes": "236223" }, { "name": "JavaScript", "bytes": "1830647" }, { "name": "Mako", "bytes": "665098" }, { "name": "Python", "bytes": "7650137" }, { "name": "VCL", "bytes": "13885" } ] }
namespace asf = foundation; namespace asr = renderer; asr::ParamArray get_uv_params(Texmap* texmap, const TimeValue time) { asr::ParamArray uv_params; if (texmap == nullptr) return uv_params; UVGen* uv_gen = texmap->GetTheUVGen(); if (!uv_gen || !uv_gen->IsStdUVGen()) return uv_params; StdUVGen* std_uv = static_cast<StdUVGen*>(uv_gen); DbgAssert(texmap->MapSlotType(texmap->GetMapChannel()) == MAPSLOT_TEXTURE); DbgAssert(static_cast<StdUVGen*>(uv_gen)->GetUVWSource() == UVWSRC_EXPLICIT); const float u_tiling = std_uv->GetUScl(time); const float v_tiling = std_uv->GetVScl(time); const float u_offset = std_uv->GetUOffs(time); const float v_offset = std_uv->GetVOffs(time); const float w_rotation = std_uv->GetWAng(time); const int tiling = std_uv->GetTextureTiling(); const int real_world_scale = std_uv->GetUseRealWorldScale(); uv_params.insert("in_real_world_mode", fmt_osl_expr(real_world_scale)); if (tiling & U_WRAP) uv_params.insert("in_wrapU", fmt_osl_expr(1)); else if (tiling & U_MIRROR) uv_params.insert("in_mirrorU", fmt_osl_expr(1)); if (tiling & V_WRAP) uv_params.insert("in_wrapV", fmt_osl_expr(1)); else if (tiling & V_MIRROR) uv_params.insert("in_mirrorV", fmt_osl_expr(1)); uv_params.insert("in_offsetU", fmt_osl_expr(u_offset)); uv_params.insert("in_offsetV", fmt_osl_expr(v_offset)); uv_params.insert("in_tilingU", fmt_osl_expr(u_tiling)); uv_params.insert("in_tilingV", fmt_osl_expr(v_tiling)); uv_params.insert("in_rotateW", fmt_osl_expr(asf::rad_to_deg(w_rotation))); // Access BMTex parameters through parameter block. 
enum { bmtex_params, bmtex_time }; enum { bmtex_clipu, bmtex_clipv, bmtex_clipw, bmtex_cliph, bmtex_jitter, bmtex_usejitter, bmtex_apply, bmtex_crop_place }; auto pblock = texmap->GetParamBlock(bmtex_params); if (pblock) { const int use_clip = pblock->GetInt(bmtex_apply, time, FOREVER); if (use_clip) { const float clip_u = pblock->GetFloat(bmtex_clipu, time, FOREVER); const float clip_v = pblock->GetFloat(bmtex_clipv, time, FOREVER); const float clip_w = pblock->GetFloat(bmtex_clipw, time, FOREVER); const float clip_h = pblock->GetFloat(bmtex_cliph, time, FOREVER); const int crop_place = pblock->GetInt(bmtex_crop_place, time, FOREVER); uv_params.insert("in_cropU", fmt_osl_expr(clip_u)); uv_params.insert("in_cropV", fmt_osl_expr(clip_v)); uv_params.insert("in_cropW", fmt_osl_expr(clip_w)); uv_params.insert("in_cropH", fmt_osl_expr(clip_h)); if (crop_place) uv_params.insert("in_crop_mode", fmt_osl_expr("place")); else uv_params.insert("in_crop_mode", fmt_osl_expr("crop")); } else uv_params.insert("in_crop_mode", fmt_osl_expr("off")); } return uv_params; } asr::ParamArray get_output_params(Texmap* texmap, const TimeValue time) { asr::ParamArray output_params; output_params.insert("in_multiplier", fmt_osl_expr(1.0f)); output_params.insert("in_clamp_output", fmt_osl_expr(0)); output_params.insert("in_invert", fmt_osl_expr(0)); output_params.insert("in_colorGain", fmt_osl_expr(foundation::Color3f(1.0f))); output_params.insert("in_colorOffset", fmt_osl_expr(foundation::Color3f(0.0f))); output_params.insert("in_alphaGain", fmt_osl_expr(1.0f)); output_params.insert("in_alphaOffset", fmt_osl_expr(0.0f)); output_params.insert("in_alphaIsLuminance", fmt_osl_expr(0)); if (texmap == nullptr) return output_params; StdTexoutGen* std_tex_output = nullptr; for (int i = 0, e = texmap->NumRefs(); i < e; ++i) { ReferenceTarget* ref = texmap->GetReference(i); if (ref != nullptr && ref->SuperClassID() == TEXOUTPUT_CLASS_ID) { std_tex_output = dynamic_cast<StdTexoutGen*>(ref); if 
(std_tex_output != nullptr) break; } } if (std_tex_output == nullptr) return output_params; output_params.insert("in_multiplier", fmt_osl_expr(std_tex_output->GetOutAmt(time))); output_params.insert("in_clamp_output", fmt_osl_expr(std_tex_output->GetClamp())); output_params.insert("in_invert", fmt_osl_expr(std_tex_output->GetInvert())); output_params.insert("in_colorGain", fmt_osl_expr(foundation::Color3f(std_tex_output->GetRGBAmt(time)))); output_params.insert("in_colorOffset", fmt_osl_expr(foundation::Color3f(std_tex_output->GetRGBOff(time)))); output_params.insert("in_alphaGain", fmt_osl_expr(std_tex_output->GetRGBAmt(time))); output_params.insert("in_alphaOffset", fmt_osl_expr(std_tex_output->GetRGBOff(time))); output_params.insert("in_alphaIsLuminance", fmt_osl_expr(std_tex_output->GetAlphaFromRGB())); return output_params; } std::string fmt_osl_expr(const std::string& s) { return asf::format("string {0}", s); } std::string fmt_osl_expr(const int value) { return asf::format("int {0}", value); } std::string fmt_osl_expr(const float value) { return asf::format("float {0}", value); } std::string fmt_osl_expr(const asf::Color3f& linear_rgb) { return asf::format("color {0} {1} {2}", linear_rgb.r, linear_rgb.g, linear_rgb.b); } std::string fmt_osl_expr(const asf::Vector3f& vector) { return asf::format("vector {0} {1} {2}", vector.x, vector.y, vector.z); } std::string fmt_osl_normal_expr(const asf::Vector3f& normal) { return asf::format("normal {0} {1} {2}", normal.x, normal.y, normal.z); } std::string fmt_osl_point_expr(const asf::Vector3f& point) { return asf::format("point {0} {1} {2}", point.x, point.y, point.z); } std::string fmt_osl_expr(Texmap* texmap) { if (is_bitmap_texture(texmap)) { const auto texture_filepath = wide_to_utf8(static_cast<BitmapTex*>(texmap)->GetMap().GetFullFilePath()); return fmt_osl_expr(texture_filepath); } else return fmt_osl_expr(std::string()); } void connect_float_texture( asr::ShaderGroup& shader_group, const char* 
material_node_name, const char* material_input_name, Texmap* texmap, const float const_value, const TimeValue time) { if (is_supported_procedural_texture(texmap, false)) { create_supported_texture( shader_group, material_node_name, material_input_name, texmap, const_value, time); return; } if (dynamic_cast<OSLTexture*>(texmap) != nullptr) { OSLTexture* osl_tex = static_cast<OSLTexture*>(texmap); osl_tex->create_osl_texture(shader_group, material_node_name, material_input_name, time); return; } if (is_bitmap_texture(texmap)) { const auto uv_transform_layer_name = asf::format("{0}_{1}_uv_transform", material_node_name, material_input_name); shader_group.add_shader("shader", "as_max_uv_transform", uv_transform_layer_name.c_str(), get_uv_params(texmap, time)); const auto layer_name = asf::format("{0}_{1}_texture", material_node_name, material_input_name); shader_group.add_shader("shader", "as_max_float_texture", layer_name.c_str(), asr::ParamArray() .insert("Filename", fmt_osl_expr(texmap))); asr::ParamArray color_balance_params = get_output_params(texmap, time) .insert("in_constantFloat", fmt_osl_expr(const_value)); const auto color_balance_layer_name = asf::format("{0}_{1}_color_balance", material_node_name, material_input_name); shader_group.add_shader("shader", "as_max_color_balance", color_balance_layer_name.c_str(), color_balance_params); shader_group.add_connection( uv_transform_layer_name.c_str(), "out_U", layer_name.c_str(), "U"); shader_group.add_connection( uv_transform_layer_name.c_str(), "out_V", layer_name.c_str(), "V"); shader_group.add_connection( layer_name.c_str(), "FloatOut", color_balance_layer_name.c_str(), "in_defaultFloat"); shader_group.add_connection( color_balance_layer_name.c_str(), "out_outAlpha", material_node_name, material_input_name); } } void connect_color_texture( asr::ShaderGroup& shader_group, const char* material_node_name, const char* material_input_name, Texmap* texmap, const Color const_color, const TimeValue time) { if 
(is_supported_procedural_texture(texmap, false)) { create_supported_texture( shader_group, material_node_name, material_input_name, texmap, const_color, time); return; } if (dynamic_cast<OSLTexture*>(texmap) != nullptr) { OSLTexture* osl_tex = static_cast<OSLTexture*>(texmap); osl_tex->create_osl_texture(shader_group, material_node_name, material_input_name, time); return; } if (is_bitmap_texture(texmap)) { const auto uv_transform_layer_name = asf::format("{0}_{1}_uv_transform", material_node_name, material_input_name); shader_group.add_shader("shader", "as_max_uv_transform", uv_transform_layer_name.c_str(), get_uv_params(texmap, time)); if (!is_linear_texture(static_cast<BitmapTex*>(texmap))) { const auto texture_layer_name = asf::format("{0}_{1}_texture", material_node_name, material_input_name); shader_group.add_shader("shader", "as_max_color_texture", texture_layer_name.c_str(), asr::ParamArray() .insert("Filename", fmt_osl_expr(texmap))); const auto srgb_to_linear_layer_name = asf::format("{0}_{1}_srgb_to_linear", material_node_name, material_input_name); shader_group.add_shader("shader", "as_max_srgb_to_linear_rgb", srgb_to_linear_layer_name.c_str(), asr::ParamArray()); asr::ParamArray color_balance_params = get_output_params(texmap, time) .insert("in_constantColor", fmt_osl_expr(to_color3f(const_color))); const auto color_balance_layer_name = asf::format("{0}_{1}_color_balance", material_node_name, material_input_name); shader_group.add_shader("shader", "as_max_color_balance", color_balance_layer_name.c_str(), color_balance_params); shader_group.add_connection( uv_transform_layer_name.c_str(), "out_U", texture_layer_name.c_str(), "U"); shader_group.add_connection( uv_transform_layer_name.c_str(), "out_V", texture_layer_name.c_str(), "V"); shader_group.add_connection( texture_layer_name.c_str(), "ColorOut", srgb_to_linear_layer_name.c_str(), "ColorIn"); shader_group.add_connection( srgb_to_linear_layer_name.c_str(), "ColorOut", 
color_balance_layer_name.c_str(), "in_defaultColor"); shader_group.add_connection( color_balance_layer_name.c_str(), "out_outColor", material_node_name, material_input_name); } else { const auto texture_layer_name = asf::format("{0}_{1}_texture", material_node_name, material_input_name); shader_group.add_shader("shader", "as_max_color_texture", texture_layer_name.c_str(), asr::ParamArray() .insert("Filename", fmt_osl_expr(texmap))); asr::ParamArray color_balance_params = get_output_params(texmap, time) .insert("in_constantColor", fmt_osl_expr(to_color3f(const_color))); const auto color_balance_layer_name = asf::format("{0}_{1}_color_balance", material_node_name, material_input_name); shader_group.add_shader("shader", "as_max_color_balance", color_balance_layer_name.c_str(), color_balance_params); shader_group.add_connection( uv_transform_layer_name.c_str(), "out_U", texture_layer_name.c_str(), "U"); shader_group.add_connection( uv_transform_layer_name.c_str(), "out_V", texture_layer_name.c_str(), "V"); shader_group.add_connection( texture_layer_name.c_str(), "ColorOut", color_balance_layer_name.c_str(), "in_defaultColor"); shader_group.add_connection( color_balance_layer_name.c_str(), "out_outColor", material_node_name, material_input_name); } } } void connect_bump_map( asr::ShaderGroup& shader_group, const char* material_node_name, const char* material_normal_input_name, const char* material_tn_input_name, Texmap* texmap, const float amount, const TimeValue time) { if (is_supported_procedural_texture(texmap, false) || dynamic_cast<OSLTexture*>(texmap) != nullptr) { auto bump_map_layer_name = asf::format("{0}_bump_map", material_node_name); connect_float_texture( shader_group, bump_map_layer_name.c_str(), "Height", texmap, 1.0f, time); shader_group.add_shader("shader", "as_max_bump_map", bump_map_layer_name.c_str(), asr::ParamArray() .insert("Amount", fmt_osl_expr(amount))); shader_group.add_connection( bump_map_layer_name.c_str(), "NormalOut", material_node_name, 
// NOTE(review): this span begins inside the tail of a bump-map connection
// helper whose signature lies above the visible chunk; the fragment below is
// its bitmap-texture branch and closing brace.
            material_normal_input_name);
        return;
    }

    // Bitmap textures: chain uv-transform -> float texture -> bump-map nodes,
    // then feed the resulting normal into the material's normal input.
    if (is_bitmap_texture(texmap))
    {
        auto uv_transform_layer_name = asf::format("{0}_bump_uv_transform", material_node_name);
        shader_group.add_shader("shader", "as_max_uv_transform", uv_transform_layer_name.c_str(),
            get_uv_params(texmap, time));

        auto texture_layer_name = asf::format("{0}_bump_map_texture", material_node_name);
        shader_group.add_shader("shader", "as_max_float_texture", texture_layer_name.c_str(),
            asr::ParamArray()
                .insert("Filename", fmt_osl_expr(texmap)));

        auto bump_map_layer_name = asf::format("{0}_bump_map", material_node_name);
        shader_group.add_shader("shader", "as_max_bump_map", bump_map_layer_name.c_str(),
            asr::ParamArray()
                .insert("Amount", fmt_osl_expr(amount)));

        shader_group.add_connection(
            uv_transform_layer_name.c_str(), "out_U",
            texture_layer_name.c_str(), "U");
        shader_group.add_connection(
            uv_transform_layer_name.c_str(), "out_V",
            texture_layer_name.c_str(), "V");
        shader_group.add_connection(
            texture_layer_name.c_str(), "FloatOut",
            bump_map_layer_name.c_str(), "Height");
        shader_group.add_connection(
            bump_map_layer_name.c_str(), "NormalOut",
            material_node_name, material_normal_input_name);
    }
}

// Connects `texmap` into the material's normal and tangent inputs as a normal
// map. Procedural/OSL textures are routed through connect_color_texture();
// bitmap textures get an explicit uv-transform + color-texture chain.
// `up_vector`: 0 selects "Green", any other value selects "Blue".
void connect_normal_map(
    asr::ShaderGroup&   shader_group,
    const char*         material_node_name,
    const char*         material_normal_input_name,
    const char*         material_tn_input_name,
    Texmap*             texmap,
    const int           up_vector,
    const float         amount,
    const TimeValue     time)
{
    if (is_supported_procedural_texture(texmap, false) ||
        dynamic_cast<OSLTexture*>(texmap) != nullptr)
    {
        auto normal_map_layer_name = asf::format("{0}_normal_map", material_node_name);
        connect_color_texture(
            shader_group,
            normal_map_layer_name.c_str(),
            "Color",
            texmap,
            Color(1.0f, 1.0f, 1.0f),
            time);
        shader_group.add_shader("shader", "as_max_normal_map", normal_map_layer_name.c_str(),
            asr::ParamArray()
                .insert("UpVector", fmt_osl_expr(up_vector == 0 ? "Green" : "Blue"))
                .insert("Amount", fmt_osl_expr(amount)));
        shader_group.add_connection(
            normal_map_layer_name.c_str(), "NormalOut",
            material_node_name, material_normal_input_name);
        shader_group.add_connection(
            normal_map_layer_name.c_str(), "TangentOut",
            material_node_name, material_tn_input_name);
        return;
    }

    if (is_bitmap_texture(texmap))
    {
        auto uv_transform_layer_name = asf::format("{0}_bump_uv_transform", material_node_name);
        shader_group.add_shader("shader", "as_max_uv_transform", uv_transform_layer_name.c_str(),
            get_uv_params(texmap, time));

        auto texture_layer_name = asf::format("{0}_normal_map_texture", material_node_name);
        shader_group.add_shader("shader", "as_max_color_texture", texture_layer_name.c_str(),
            asr::ParamArray()
                .insert("Filename", fmt_osl_expr(texmap)));

        auto normal_map_layer_name = asf::format("{0}_normal_map", material_node_name);
        shader_group.add_shader("shader", "as_max_normal_map", normal_map_layer_name.c_str(),
            asr::ParamArray()
                .insert("UpVector", fmt_osl_expr(up_vector == 0 ? "Green" : "Blue"))
                .insert("Amount", fmt_osl_expr(amount)));

        shader_group.add_connection(
            uv_transform_layer_name.c_str(), "out_U",
            texture_layer_name.c_str(), "U");
        shader_group.add_connection(
            uv_transform_layer_name.c_str(), "out_V",
            texture_layer_name.c_str(), "V");
        shader_group.add_connection(
            texture_layer_name.c_str(), "ColorOut",
            normal_map_layer_name.c_str(), "Color");
        shader_group.add_connection(
            normal_map_layer_name.c_str(), "NormalOut",
            material_node_name, material_normal_input_name);
        shader_group.add_connection(
            normal_map_layer_name.c_str(), "TangentOut",
            material_node_name, material_tn_input_name);
    }
}

// Instantiates `mat`'s appleseed material inside `assembly`, copies its OSL
// shader network (minus the terminal shader and terminal connection) into
// `shader_group`, and wires the copied network into shader_name.shader_input.
// No-op when `mat` is not an appleseed material or exposes no OSL surface.
void connect_sub_mtl(
    asr::Assembly&      assembly,
    asr::ShaderGroup&   shader_group,
    const char*         shader_name,
    const char*         shader_input,
    Mtl*                mat,
    const TimeValue     time)
{
    auto appleseed_mtl =
        static_cast<IAppleseedMtl*>(mat->GetInterface(IAppleseedMtl::interface_id()));
    if (!appleseed_mtl)
        return;     // not an appleseed material; nothing to connect

    std::string layer_name = make_unique_name(assembly.materials(),
        asf::format("{0}_{1}_sub_mat", shader_name, mat->GetName()));

    assembly.materials().insert(appleseed_mtl->create_material(
        assembly,
        layer_name.c_str(),
        false,
        time));

    asr::Material* layer_material = assembly.materials().get_by_name(layer_name.c_str());
    if (!layer_material->get_parameters().exist_path("osl_surface"))
        return;     // the created material exposes no OSL surface

    auto shader_group_name = layer_material->get_parameters().get("osl_surface");
    asr::ShaderGroup* mtl_group = assembly.shader_groups().get_by_name(shader_group_name);

    // Don't copy last shader and last connection: the sub-material's terminal
    // node is replaced by the connection established below.
    for (auto shader = mtl_group->shaders().begin();
         shader != --(mtl_group->shaders().end());
         shader++)
    {
        shader_group.add_shader(shader->get_type(), shader->get_shader(),
            shader->get_layer(), shader->get_parameters());
    }
    for (auto conn = mtl_group->shader_connections().begin();
         conn != --(mtl_group->shader_connections().end());
         conn++)
    {
        shader_group.add_connection(conn->get_src_layer(), conn->get_src_param(),
            conn->get_dst_layer(), conn->get_dst_param());
    }

    auto last_conn =
        mtl_group->shader_connections().get_by_index(mtl_group->shader_connections().size() - 1);
    // NOTE(review): the source layer here is layer_name rather than
    // last_conn->get_src_layer() — presumably the sub-material's terminal
    // layer is named after the material; confirm against create_material().
    shader_group.add_connection(layer_name.c_str(), last_conn->get_src_param(),
        shader_name, shader_input);
}

// Builds one OSL shader layer from a 3ds Max parameter block: connects mapped
// (texture) parameters, splices in closure sub-materials, collects constant
// parameter values into `params`, then adds the shader to `shader_group`.
// `assembly` may be null, in which case closure sub-materials are skipped.
void create_osl_shader(
    renderer::Assembly* assembly,
    asr::ShaderGroup&   shader_group,
    const char*         layer_name,
    IParamBlock2*       param_block,
    const OSLShaderInfo* shader_info,
    const TimeValue     time)
{
    asr::ParamArray params;
    for (const auto& param_info : shader_info->m_params)
    {
        const MaxParam& max_param = param_info.m_max_param;
        Texmap* texmap = nullptr;

        // Mapped (non-closure) parameters: connect the texture network.
        if (max_param.m_is_connectable && max_param.m_param_type != MaxParam::Closure)
        {
            param_block->GetValue(max_param.m_max_map_param_id, time, texmap, FOREVER);
            if (texmap != nullptr)
            {
                switch (max_param.m_param_type)
                {
                  case MaxParam::Float:
                    {
                        // When the parameter also has a constant, use it as the
                        // texture multiplier; otherwise default to 1.0.
                        const float constant_value = max_param.m_is_constant ?
                            param_block->GetFloat(max_param.m_max_param_id, time, FOREVER) : 1.0f;
                        connect_float_texture(
                            shader_group,
                            layer_name,
                            max_param.m_osl_param_name.c_str(),
                            texmap,
                            constant_value,
                            time);
                    }
                    break;
                  case MaxParam::Color:
                    {
                        const Color constant_color = max_param.m_is_constant ?
                            param_block->GetColor(max_param.m_max_param_id, time, FOREVER) : Color(1.0, 1.0, 1.0);
                        connect_color_texture(
                            shader_group,
                            layer_name,
                            max_param.m_osl_param_name.c_str(),
                            texmap,
                            constant_color,
                            time);
                    }
                    break;
                  case MaxParam::VectorParam:
                  case MaxParam::NormalParam:
                  case MaxParam::PointParam:
                    {
                        // Vector-like inputs are only connectable from OSL textures.
                        if (dynamic_cast<OSLTexture*>(texmap) != nullptr)
                        {
                            OSLTexture* osl_tex = static_cast<OSLTexture*>(texmap);
                            osl_tex->create_osl_texture(
                                shader_group,
                                layer_name,
                                max_param.m_osl_param_name.c_str(),
                                time);
                        }
                    }
                    break;
                }
            }
        }

        // Closure parameters: splice the assigned sub-material's network in.
        if (max_param.m_is_connectable && max_param.m_param_type == MaxParam::Closure)
        {
            Mtl* material = nullptr;
            param_block->GetValue(max_param.m_max_map_param_id, time, material, FOREVER);
            if (material != nullptr && assembly != nullptr)
            {
                connect_sub_mtl(
                    *assembly,
                    shader_group,
                    layer_name,
                    max_param.m_osl_param_name.c_str(),
                    material,
                    time);
            }
        }

        // Constant parameters: serialize the value into the OSL param array.
        if (max_param.m_is_constant)
        {
            switch (max_param.m_param_type)
            {
              case MaxParam::Float:
                {
                    const float param_value = param_block->GetFloat(max_param.m_max_param_id, time, FOREVER);
                    params.insert(max_param.m_osl_param_name.c_str(), fmt_osl_expr(param_value));
                }
                break;
              case MaxParam::IntNumber:
              case MaxParam::IntCheckbox:
              case MaxParam::IntMapper:
                {
                    const int param_value = param_block->GetInt(max_param.m_max_param_id, time, FOREVER);
                    params.insert(max_param.m_osl_param_name.c_str(), fmt_osl_expr(param_value));
                }
                break;
              case MaxParam::Color:
                {
                    const auto param_value = param_block->GetColor(max_param.m_max_param_id, time);
                    params.insert(max_param.m_osl_param_name.c_str(), fmt_osl_expr(to_color3f(param_value)));
                }
                break;
              case MaxParam::VectorParam:
                {
                    const Point3 param_value = param_block->GetPoint3(max_param.m_max_param_id, time);
                    params.insert(max_param.m_osl_param_name.c_str(), fmt_osl_expr(to_vector3f(param_value)));
                }
                break;
              case MaxParam::NormalParam:
                {
                    const Point3 param_value = param_block->GetPoint3(max_param.m_max_param_id, time);
                    params.insert(max_param.m_osl_param_name.c_str(), fmt_osl_normal_expr(to_vector3f(param_value)));
                }
                break;
              case MaxParam::PointParam:
                {
                    const Point3 param_value = param_block->GetPoint3(max_param.m_max_param_id, time);
                    params.insert(max_param.m_osl_param_name.c_str(), fmt_osl_point_expr(to_vector3f(param_value)));
                }
                break;
              case MaxParam::StringPopup:
                {
                    // The popup's option list is "|"-separated; the stored int
                    // indexes into it.
                    std::vector<std::string> fields;
                    asf::tokenize(param_info.m_options, "|", fields);
                    const int param_value = param_block->GetInt(max_param.m_max_param_id, time, FOREVER);
                    params.insert(max_param.m_osl_param_name.c_str(), fmt_osl_expr(fields[param_value]));
                }
                break;
              case MaxParam::String:
                {
                    const wchar_t* str_value;
                    param_block->GetValue(max_param.m_max_param_id, time, str_value, FOREVER);
                    if (str_value != nullptr)
                        params.insert(max_param.m_osl_param_name.c_str(), fmt_osl_expr(wide_to_utf8(str_value)));
                }
                break;
            }
        }
    }
    shader_group.add_shader("shader", shader_info->m_shader_name.c_str(), layer_name, params);
}
{ "content_hash": "2566aa67ed6e4c75e92811ef1ecf37f8", "timestamp": "", "source": "github", "line_count": 681, "max_line_length": 140, "avg_line_length": 38.72246696035242, "alnum_prop": 0.5600303375047402, "repo_name": "usakhelo/appleseed-max", "id": "ed288272b9ebea84aaeb74d585b27356b03529ca", "size": "28490", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/appleseed-max-impl/oslutils.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "7848" }, { "name": "C", "bytes": "76631" }, { "name": "C++", "bytes": "910770" } ] }
package stellar.io;

import java.io.File;
import java.io.IOException;
import stellar.data.GroupRecord;
import stellar.data.GroupType;
import stellar.data.HexID;
import java.io.StreamTokenizer;

/**
 * This class reads the .SEC files as found on http://maps.travellercentral.com .
 * These files are maintained by Anthony Jackson for the purpose of generating
 * maps and analysis using the GURPS Traveller: Far Trader trade rules.
 * Three header dialects are recognized: Anthony Jackson's, Galactic, and the
 * generic "Name ... UWP ..." layout.
 */
public class SECFileReader extends SectorTextReader
{
    /** @param file path of the .sec file to read */
    public SECFileReader (String file)
    {
        super(file);
    }

    /** @param file the .sec file to read */
    public SECFileReader (File file)
    {
        super(file);
    }

    /**
     * Reads the sector header, dispatching on the first token to one of the
     * three known header layouts, then fills in sector metadata and registers
     * the sector with the data set.
     *
     * @throws SECFileStateMachineException if no sector name could be parsed
     */
    public void readHeader() throws IOException, SECFileStateMachineException
    {
        sector = new GroupRecord();
        sector.setType(GroupType.SECTOR);
        //parser.commentChar('#');
        parser.nextToken(); // initial name
        if (parser.ttype == StreamTokenizer.TT_WORD)
        {
            if (parser.sval.equals("Name")) // generic .sec file
            {
                readGENheader();
            }
            else // Anthony .sec file
            {
                readAJheader();
            }
        }
        else if (parser.ttype == StreamTokenizer.TT_NUMBER)
        {
            // intentionally ignored: a leading number matches no known header
        }
        else if (parser.ttype == StreamTokenizer.TT_EOL)
        {
            // intentionally ignored: blank leading line
        }
        else
        {
            /* if the first character is "#", this is a Galactic sector file.
             * We assume... */
            if ((char)parser.ttype == '#')
            {
                readGALheader();
            }
        }
        if (sector.getName() == null)
        {
            throw new SECFileStateMachineException ("No sector name found in .sec file, probably due to unknown header configuration. Please contact support");
        }
        // NOTE(review): assumes the sector name is at least 4 characters long.
        sector.setKey(sector.getName().substring(0,4).toLowerCase() + ".0000");
        sector.setValue(sector.getName());
        sector.setProvider(data.getDefaultProvider());
        sector.getLocation().setHexGroup(sector);
        sector.setExtentX(32);
        sector.setExtentY(40);
        data.addGroup(sector);
    }

    /** .SEC files carry no link data; intentionally a no-op. */
    public void readLinks()
    {
    }

    /* Galactic .sec files have a 5 line comment header
     * 1: sector name / galaxy name
     * 2: Blank
     * 3: marker line with character positions
     * 4: column header line
     * 5: marker line
     */
    public void readGALheader() throws IOException
    {
        boolean found = false;
        parser.nextToken();
        sector.setName(parser.sval);
        parser.nextToken();
        // Accumulate name tokens until the "/" separator or end of line.
        while ((char)parser.ttype != '/' && parser.ttype != StreamTokenizer.TT_EOL)
        {
            if (parser.ttype == StreamTokenizer.TT_WORD)
                sector.setName(sector.getName() + " " + parser.sval);
            else if (parser.ttype == StreamTokenizer.TT_NUMBER)
                sector.setName(sector.getName() + " " + Double.toString(parser.nval));
            else
                sector.setName(sector.getName().concat(Character.toString((char)parser.ttype)));
            parser.nextToken();
        }
        // Skip the remainder of the line.
        while (!found)
        {
            parser.nextToken();
            found = (parser.ttype == StreamTokenizer.TT_EOL);
        }
        sector.setLocation(new HexID (1,1));
        parser.commentChar('#');
    }

    /* Anthony's .sec files have a three line header, consisting of a sector
     * name, an x/y position, and a line of numbers to identify the location of
     * the important data elements he uses for the trade maps.
     */
    public void readAJheader() throws IOException
    {
        boolean found = false;
        int x, y;
        sector.setName(parser.sval);
        parser.nextToken(); // carriage return
        while (parser.ttype != StreamTokenizer.TT_EOL)
        {
            sector.setName(sector.getName() + " " + parser.sval);
            parser.nextToken();
        }
        parser.nextToken(); // x coord
        x = (int)parser.nval;
        parser.nextToken(); // y coord
        y = (int)parser.nval;
        sector.setLocation(new HexID(x,y));
        sector.getLocation().setHexType(GroupType.GROUP);
        parser.nextToken(); //CRLF
        // Skip the third header line (column positions).
        while (!found)
        {
            parser.nextToken();
            found = (parser.ttype == StreamTokenizer.TT_EOL);
        }
        parser.commentChar('#');
    }

    /* This is a generic, unnamed and widely available .sec file and header.
     * The header looks like below; we use the initial "Name" token to
     * identify it. We parse through it looking for the four dots on the
     * ruler line.

     Name UWP Bases Codes PBG Allegiance/Stellar Data
     1-13: Name
     15-18: HexNbr
     20-28: UWP
     31: Bases
     33-47: Codes & Comments
     49: Zone
     49: Zone
     52-54: PBG
     56-57: Allegiance
     59-74: Stellar Data

     ....+....1....+....2....+....3....+....4....+....5....+....6....+....7....+....8
     */
    public void readGENheader() throws IOException
    {
        boolean found = false;
        int dotCount = 0;
        sector.setName("Unknown");
        //read until the four dots are found
        //parser interprets a lone '.' as a number of a value 0.0
        while (!found)
        {
            parser.nextToken();
            if (parser.ttype == StreamTokenizer.TT_NUMBER)
                dotCount++;
            else
                dotCount = 0;
            if (dotCount == 4)
                found = true;
        }
        found = false;
        // read until the end of the line.
        while (!found)
        {
            parser.nextToken();
            found = (parser.ttype == StreamTokenizer.TT_EOL);
        }
        sector.setLocation(new HexID (1,1));
    }

    /** Ad-hoc manual test entry point; reads a hard-coded local file. */
    public static void main(String[] args)
    {
        SECFileReader readSECFile = new SECFileReader("C:\\thom\\projects\\cartography\\fornast.sec");
        try
        {
            readSECFile.read();
        }
        catch (Exception ex)
        {
            ex.printStackTrace();
        }
    }
}
{ "content_hash": "fa0bedc24703e265042a7ac7d859190d", "timestamp": "", "source": "github", "line_count": 186, "max_line_length": 159, "avg_line_length": 32.935483870967744, "alnum_prop": 0.5439111981717271, "repo_name": "makhidkarun/cartography", "id": "43da3725acc5f44c5e33642e3a3a72bbca9fb339", "size": "6126", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/stellar/io/SECFileReader.java", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "4351" }, { "name": "CSS", "bytes": "1684" }, { "name": "HTML", "bytes": "46258" }, { "name": "Java", "bytes": "673181" } ] }
package com.thinkgem.jeesite.common.utils.excel;

import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.servlet.http.HttpServletResponse;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.vfs.FileObject;
import org.apache.commons.vfs.FileSystemManager;
import org.apache.commons.vfs.VFS;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellStyle;
import org.apache.poi.ss.usermodel.Comment;
import org.apache.poi.ss.usermodel.DataFormat;
import org.apache.poi.ss.usermodel.Font;
import org.apache.poi.ss.usermodel.IndexedColors;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.util.CellRangeAddress;
import org.apache.poi.xssf.streaming.SXSSFWorkbook;
import org.apache.poi.xssf.usermodel.XSSFClientAnchor;
import org.apache.poi.xssf.usermodel.XSSFRichTextString;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.Lists;
import com.thinkgem.jeesite.common.utils.Encodes;
import com.thinkgem.jeesite.common.utils.FileUtils;
import com.thinkgem.jeesite.common.utils.Reflections;
import com.thinkgem.jeesite.common.utils.excel.annotation.ExcelField;
import com.thinkgem.jeesite.modules.sys.utils.DictUtils;

/**
 * Excel export helper ("XLSX" format; supports very large exports via the
 * streaming workbook, @see org.apache.poi.ss.SpreadsheetVersion).
 * @author ThinkGem
 * @version 2013-04-21
 */
public class ExportExcel {

	private static Logger log = LoggerFactory.getLogger(ExportExcel.class);

	/**
	 * Streaming workbook (keeps at most 500 rows in memory).
	 */
	private SXSSFWorkbook wb;

	/**
	 * Active worksheet.
	 */
	private Sheet sheet;

	/**
	 * Named cell styles built by createStyles().
	 */
	private Map<String, CellStyle> styles;

	/**
	 * Next row index to write.
	 */
	private int rownum;

	/**
	 * Annotation list (Object[]{ ExcelField, Field/Method }).
	 */
	List<Object[]> annotationList = Lists.newArrayList();

	/**
	 * Constructor for a data export (type 1).
	 * @param title sheet title; pass blank for no title row
	 * @param cls entity class; column titles come from its ExcelField annotations
	 */
	public ExportExcel(String title, Class<?> cls){
		this(title, cls, 1);
	}

	/**
	 * Constructor.
	 * @param title sheet title; pass blank for no title row
	 * @param cls entity class; column titles come from its ExcelField annotations
	 * @param type export type (1: data export; 2: template export)
	 * @param groups annotation groups to include (empty means all)
	 */
	public ExportExcel(String title, Class<?> cls, int type, int... groups){
		// Get annotation field
		Field[] fs = cls.getDeclaredFields();
		for (Field f : fs){
			ExcelField ef = f.getAnnotation(ExcelField.class);
			if (ef != null && (ef.type()==0 || ef.type()==type)){
				if (groups!=null && groups.length>0){
					boolean inGroup = false;
					for (int g : groups){
						if (inGroup){
							break;
						}
						for (int efg : ef.groups()){
							if (g == efg){
								inGroup = true;
								annotationList.add(new Object[]{ef, f});
								break;
							}
						}
					}
				}else{
					annotationList.add(new Object[]{ef, f});
				}
			}
		}
		// Get annotation method
		Method[] ms = cls.getDeclaredMethods();
		for (Method m : ms){
			ExcelField ef = m.getAnnotation(ExcelField.class);
			if (ef != null && (ef.type()==0 || ef.type()==type)){
				if (groups!=null && groups.length>0){
					boolean inGroup = false;
					for (int g : groups){
						if (inGroup){
							break;
						}
						for (int efg : ef.groups()){
							if (g == efg){
								inGroup = true;
								annotationList.add(new Object[]{ef, m});
								break;
							}
						}
					}
				}else{
					annotationList.add(new Object[]{ef, m});
				}
			}
		}
		// Field sorting by ExcelField.sort()
		Collections.sort(annotationList, new Comparator<Object[]>() {
			public int compare(Object[] o1, Object[] o2) {
				return new Integer(((ExcelField)o1[0]).sort()).compareTo(
						new Integer(((ExcelField)o2[0]).sort()));
			};
		});
		// Initialize
		List<String> headerList = Lists.newArrayList();
		for (Object[] os : annotationList){
			String t = ((ExcelField)os[0]).title();
			// For a data export, strip the "**comment" suffix from the title.
			if (type==1){
				String[] ss = StringUtils.split(t, "**", 2);
				if (ss.length==2){
					t = ss[0];
				}
			}
			headerList.add(t);
		}
		initialize(title, headerList);
	}

	/**
	 * Constructor.
	 * @param title sheet title; pass blank for no title row
	 * @param headers header texts
	 */
	public ExportExcel(String title, String[] headers) {
		initialize(title, Lists.newArrayList(headers));
	}

	/**
	 * Constructor.
	 * @param title sheet title; pass blank for no title row
	 * @param headerList header texts
	 */
	public ExportExcel(String title, List<String> headerList) {
		initialize(title, headerList);
	}

	/**
	 * Builds the workbook, sheet, styles, title row and header row.
	 * A header of the form "text**comment" produces a cell comment.
	 * @param title sheet title; blank means no title row
	 * @param headerList column header texts
	 */
	private void initialize(String title, List<String> headerList) {
		this.wb = new SXSSFWorkbook(500);
		this.sheet = wb.createSheet("Export");
		this.styles = createStyles(wb);
		// Create title
		if (StringUtils.isNotBlank(title)){
			Row titleRow = sheet.createRow(rownum++);
			titleRow.setHeightInPoints(30);
			Cell titleCell = titleRow.createCell(0);
			titleCell.setCellStyle(styles.get("title"));
			titleCell.setCellValue(title);
			// NOTE(review): the third CellRangeAddress argument (firstCol)
			// reuses getRowNum(); this merges the intended range only because
			// the title row is row 0 — confirm before changing row layout.
			sheet.addMergedRegion(new CellRangeAddress(titleRow.getRowNum(),
					titleRow.getRowNum(), titleRow.getRowNum(), headerList.size()-1));
		}
		// Create header
		// NOTE(review): headerList is already dereferenced above when a title
		// is present, so this null check comes too late to protect that path.
		if (headerList == null){
			throw new RuntimeException("headerList not null!");
		}
		Row headerRow = sheet.createRow(rownum++);
		headerRow.setHeightInPoints(16);
		for (int i = 0; i < headerList.size(); i++) {
			Cell cell = headerRow.createCell(i);
			cell.setCellStyle(styles.get("header"));
			String[] ss = StringUtils.split(headerList.get(i), "**", 2);
			if (ss.length==2){
				cell.setCellValue(ss[0]);
				Comment comment = this.sheet.createDrawingPatriarch().createCellComment(
						new XSSFClientAnchor(0, 0, 0, 0, (short) 3, 3, (short) 5, 6));
				comment.setString(new XSSFRichTextString(ss[1]));
				cell.setCellComment(comment);
			}else{
				cell.setCellValue(headerList.get(i));
			}
			sheet.autoSizeColumn(i);
		}
		// Widen each column to double its auto-sized width, minimum 3000 units.
		for (int i = 0; i < headerList.size(); i++) {
			int colWidth = sheet.getColumnWidth(i)*2;
			sheet.setColumnWidth(i, colWidth < 3000 ? 3000 : colWidth);
		}
		log.debug("Initialize success.");
	}

	/**
	 * Creates the named cell styles used by this exporter.
	 * @param wb workbook
	 * @return map of style name ("title", "header", "data", "data1".."data3") to style
	 */
	private Map<String, CellStyle> createStyles(Workbook wb) {
		Map<String, CellStyle> styles = new HashMap<String, CellStyle>();

		CellStyle style = wb.createCellStyle();
		style.setAlignment(CellStyle.ALIGN_CENTER);
		style.setVerticalAlignment(CellStyle.VERTICAL_CENTER);
		Font titleFont = wb.createFont();
		titleFont.setFontName("Arial");
		titleFont.setFontHeightInPoints((short) 16);
		titleFont.setBoldweight(Font.BOLDWEIGHT_BOLD);
		style.setFont(titleFont);
		styles.put("title", style);

		style = wb.createCellStyle();
		style.setVerticalAlignment(CellStyle.VERTICAL_CENTER);
		style.setBorderRight(CellStyle.BORDER_THIN);
		style.setRightBorderColor(IndexedColors.GREY_50_PERCENT.getIndex());
		style.setBorderLeft(CellStyle.BORDER_THIN);
		style.setLeftBorderColor(IndexedColors.GREY_50_PERCENT.getIndex());
		style.setBorderTop(CellStyle.BORDER_THIN);
		style.setTopBorderColor(IndexedColors.GREY_50_PERCENT.getIndex());
		style.setBorderBottom(CellStyle.BORDER_THIN);
		style.setBottomBorderColor(IndexedColors.GREY_50_PERCENT.getIndex());
		Font dataFont = wb.createFont();
		dataFont.setFontName("Arial");
		dataFont.setFontHeightInPoints((short) 10);
		style.setFont(dataFont);
		styles.put("data", style);

		// "data1".."data3": data style with left/center/right alignment.
		style = wb.createCellStyle();
		style.cloneStyleFrom(styles.get("data"));
		style.setAlignment(CellStyle.ALIGN_LEFT);
		styles.put("data1", style);

		style = wb.createCellStyle();
		style.cloneStyleFrom(styles.get("data"));
		style.setAlignment(CellStyle.ALIGN_CENTER);
		styles.put("data2", style);

		style = wb.createCellStyle();
		style.cloneStyleFrom(styles.get("data"));
		style.setAlignment(CellStyle.ALIGN_RIGHT);
		styles.put("data3", style);

		style = wb.createCellStyle();
		style.cloneStyleFrom(styles.get("data"));
//		style.setWrapText(true);
		style.setAlignment(CellStyle.ALIGN_CENTER);
		style.setFillForegroundColor(IndexedColors.GREY_50_PERCENT.getIndex());
		style.setFillPattern(CellStyle.SOLID_FOREGROUND);
		Font headerFont = wb.createFont();
		headerFont.setFontName("Arial");
		headerFont.setFontHeightInPoints((short) 10);
		headerFont.setBoldweight(Font.BOLDWEIGHT_BOLD);
		headerFont.setColor(IndexedColors.WHITE.getIndex());
		style.setFont(headerFont);
		styles.put("header", style);

		return styles;
	}

	/**
	 * Appends a new row.
	 * @return the created row
	 */
	public Row addRow(){
		return sheet.createRow(rownum++);
	}

	/**
	 * Adds a cell with default alignment.
	 * @param row target row
	 * @param column column index
	 * @param val value to write
	 * @return the created cell
	 */
	public Cell addCell(Row row, int column, Object val){
		return this.addCell(row, column, val, 0, Class.class);
	}

	/**
	 * Adds a cell.
	 * @param row target row
	 * @param column column index
	 * @param val value to write
	 * @param align alignment (1: left; 2: center; 3: right)
	 * @param fieldType optional converter class exposing a static setValue(Object)
	 * @return the created cell
	 */
	public Cell addCell(Row row, int column, Object val, int align, Class<?> fieldType){
		Cell cell = row.createCell(column);
		CellStyle style = styles.get("data"+(align>=1&&align<=3?align:""));
		try {
			if (val == null){
				cell.setCellValue("");
			} else if (val instanceof String) {
				cell.setCellValue((String) val);
			} else if (val instanceof Integer) {
				cell.setCellValue((Integer) val);
			} else if (val instanceof Long) {
				cell.setCellValue((Long) val);
			} else if (val instanceof Double) {
				cell.setCellValue((Double) val);
			} else if (val instanceof Float) {
				cell.setCellValue((Float) val);
			} else if (val instanceof Date) {
				DataFormat format = wb.createDataFormat();
				style.setDataFormat(format.getFormat("yyyy-MM-dd"));
				cell.setCellValue((Date) val);
			} else {
				// Unknown types go through a converter: either the supplied
				// fieldType, or a "fieldtype.<SimpleName>Type" class resolved
				// by naming convention next to this class.
				if (fieldType != Class.class){
					cell.setCellValue((String)fieldType.getMethod("setValue", Object.class).invoke(null, val));
				}else{
					cell.setCellValue((String)Class.forName(this.getClass().getName().replaceAll(this.getClass().getSimpleName(),
							"fieldtype."+val.getClass().getSimpleName()+"Type")).getMethod("setValue", Object.class).invoke(null, val));
				}
			}
		} catch (Exception ex) {
			// Fall back to toString() when conversion fails.
			log.info("Set cell value ["+row.getRowNum()+","+column+"] error: " + ex.toString());
			cell.setCellValue(val.toString());
		}
		cell.setCellStyle(style);
		return cell;
	}

	/**
	 * Writes the data rows (values resolved via the ExcelField annotations).
	 * @param list entity list
	 * @return this, for chaining
	 */
	public <E> ExportExcel setDataList(List<E> list){
		for (E e : list){
			int colunm = 0;
			Row row = this.addRow();
			StringBuilder sb = new StringBuilder();
			for (Object[] os : annotationList){
				ExcelField ef = (ExcelField)os[0];
				Object val = null;
				// Get entity value
				try{
					if (StringUtils.isNotBlank(ef.value())){
						val = Reflections.invokeGetter(e, ef.value());
					}else{
						if (os[1] instanceof Field){
							val = Reflections.invokeGetter(e, ((Field)os[1]).getName());
						}else if (os[1] instanceof Method){
							val = Reflections.invokeMethod(e, ((Method)os[1]).getName(), new Class[] {}, new Object[] {});
						}
					}
					// If is dict, get dict label
					if (StringUtils.isNotBlank(ef.dictType())){
						val = DictUtils.getDictLabel(val==null?"":val.toString(), ef.dictType(), "");
					}
				}catch(Exception ex) {
					// Failure to ignore
					log.info(ex.toString());
					val = "";
				}
				this.addCell(row, colunm++, val, ef.align(), ef.fieldType());
				sb.append(val + ", ");
			}
			log.debug("Write success: ["+row.getRowNum()+"] "+sb.toString());
		}
		return this;
	}

	/**
	 * Writes the workbook to an output stream.
	 * @param os target stream
	 */
	public ExportExcel write(OutputStream os) throws IOException{
		wb.write(os);
		return this;
	}

	/**
	 * Streams the workbook to the HTTP client as an attachment download.
	 * @param fileName download file name
	 */
	public ExportExcel write(HttpServletResponse response, String fileName) throws IOException{
		response.reset();
		response.setContentType("application/octet-stream; charset=utf-8");
		response.setHeader("Content-Disposition", "attachment; filename="+Encodes.urlEncode(fileName));
		write(response.getOutputStream());
		return this;
	}

	/**
	 * Writes the workbook to a file.
	 * NOTE(review): the FileOutputStream is never closed here.
	 * @param name output file path
	 */
	public ExportExcel writeFile(String name) throws FileNotFoundException, IOException{
		FileOutputStream os = new FileOutputStream(name);
		this.write(os);
		return this;
	}

	/**
	 * Disposes the streaming workbook's temporary files.
	 */
	public ExportExcel dispose(){
		wb.dispose();
		return this;
	}

//	/**
//	 * 导出测试
//	 */
//	public static void main(String[] args) throws Throwable {
//
//		List<String> headerList = Lists.newArrayList();
//		for (int i = 1; i <= 10; i++) {
//			headerList.add("表头"+i);
//		}
//
//		List<String> dataRowList = Lists.newArrayList();
//		for (int i = 1; i <= headerList.size(); i++) {
//			dataRowList.add("数据"+i);
//		}
//
//		List<List<String>> dataList = Lists.newArrayList();
//		for (int i = 1; i <=1000000; i++) {
//			dataList.add(dataRowList);
//		}
//
//		ExportExcel ee = new ExportExcel("表格标题", headerList);
//
//		for (int i = 0; i < dataList.size(); i++) {
//			Row row = ee.addRow();
//			for (int j = 0; j < dataList.get(i).size(); j++) {
//				ee.addCell(row, j, dataList.get(i).get(j));
//			}
//		}
//
//		ee.writeFile("target/export.xlsx");
//
//		ee.dispose();
//
//		log.debug("Export success.");
//
//	}

	/**
	 * Template-based constructor: loads an existing XLSX template instead of a
	 * blank workbook.
	 * @param title sheet title; pass blank for no title row
	 * @param headerList header texts
	 * @param modle parent directory of the template (also used as web-app root)
	 * @param fileName Excel template file name
	 */
	public ExportExcel(String title, List<String> headerList,String modle,String fileName) {
		String dir = FileUtils.getAbsolutePath(modle+"/WEB-INF/model/"+modle+"/"+fileName);
		FileObject template = null;
		InputStream is = null;
		try {
			// Resolve the template through commons-vfs.
			FileSystemManager fsmWrapped=VFS.getManager();;
			template = fsmWrapped.resolveFile(dir);
			is = template.getContent().getInputStream();
			// Read the template into a streaming workbook.
			this.wb = new SXSSFWorkbook(new XSSFWorkbook(is));
			this.sheet = wb.getSheetAt(0);
			this.styles = createStyles(wb);
			template.close();
		} catch (Exception e) {
			log.error("模板获取失败,文件路径:"+dir);
		}finally{
			IOUtils.closeQuietly(is);
		}
		initialize(title,headerList);
	}

	/**
	 * Writes data rows from a list of maps, taking values in colName order.
	 * @param list row data
	 * @param colName map keys in column order
	 * @return this, for chaining
	 */
	public ExportExcel setDataList(List<Map<String,Object>> list,String[] colName){
		for (Map<String,Object> e : list){
			int colunm = 0;
			Row row = this.addRow();
			StringBuilder sb = new StringBuilder();
			for (String n : colName){
				Object val = e.get(n);
				this.addCell(row, colunm++, val, 2,String.class);
				sb.append(val + ", ");
			}
			log.debug("Write success: ["+row.getRowNum()+"] "+sb.toString());
		}
		return this;
	}

	/** Overrides the next row index to write. */
	public void setRownum(int rownum) {
		this.rownum = rownum;
	}

}
{ "content_hash": "a7e20c6e3abee72365f6582ad350b5e0", "timestamp": "", "source": "github", "line_count": 533, "max_line_length": 115, "avg_line_length": 28.786116322701687, "alnum_prop": 0.6502639640226814, "repo_name": "zkai7/wpf", "id": "f9177fb69a62e799dd33410d8e3fb7afd38bc007", "size": "16389", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/thinkgem/jeesite/common/utils/excel/ExportExcel.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "3753" }, { "name": "Batchfile", "bytes": "6843" }, { "name": "CSS", "bytes": "1307457" }, { "name": "HTML", "bytes": "2894068" }, { "name": "Java", "bytes": "1712065" }, { "name": "JavaScript", "bytes": "9662532" }, { "name": "PHP", "bytes": "8060" }, { "name": "PLSQL", "bytes": "73251" }, { "name": "PLpgSQL", "bytes": "2431" } ] }
using System; namespace Azure.ResourceManager.HealthBot.Models { /// <summary> The properties of a Azure Health Bot. The Health Bot Service is a cloud platform that empowers developers in Healthcare organizations to build and deploy their compliant, AI-powered virtual health assistants and health bots, that help them improve processes and reduce costs. </summary> public partial class HealthBotProperties { /// <summary> Initializes a new instance of HealthBotProperties. </summary> public HealthBotProperties() { } /// <summary> Initializes a new instance of HealthBotProperties. </summary> /// <param name="provisioningState"> The provisioning state of the Azure Health Bot resource. </param> /// <param name="botManagementPortalLink"> The link. </param> /// <param name="keyVaultProperties"> KeyVault properties for the resource encryption. </param> internal HealthBotProperties(string provisioningState, Uri botManagementPortalLink, HealthBotKeyVaultProperties keyVaultProperties) { ProvisioningState = provisioningState; BotManagementPortalLink = botManagementPortalLink; KeyVaultProperties = keyVaultProperties; } /// <summary> The provisioning state of the Azure Health Bot resource. </summary> public string ProvisioningState { get; } /// <summary> The link. </summary> public Uri BotManagementPortalLink { get; } /// <summary> KeyVault properties for the resource encryption. </summary> public HealthBotKeyVaultProperties KeyVaultProperties { get; set; } } }
{ "content_hash": "3d8dbfb72d4c2f4f0a27dd25842f4b38", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 304, "avg_line_length": 53.58064516129032, "alnum_prop": 0.7049969897652016, "repo_name": "Azure/azure-sdk-for-net", "id": "380459e8c16234704920b61171094b77c0f806da", "size": "1799", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "sdk/healthbot/Azure.ResourceManager.HealthBot/src/Generated/Models/HealthBotProperties.cs", "mode": "33188", "license": "mit", "language": [] }
@class XMPPJID; @interface ServerlessDemoAppDelegate : NSObject <UIApplicationDelegate> { XMPPJID *myJID; NSManagedObjectModel *managedObjectModel; NSManagedObjectContext *managedObjectContext; NSPersistentStoreCoordinator *persistentStoreCoordinator; UIWindow *window; UINavigationController *navigationController; } @property (nonatomic, readonly) NSManagedObjectModel *managedObjectModel; @property (nonatomic, readonly) NSManagedObjectContext *managedObjectContext; @property (nonatomic, readonly) NSPersistentStoreCoordinator *persistentStoreCoordinator; @property (nonatomic) IBOutlet UIWindow *window; @property (nonatomic) IBOutlet UINavigationController *navigationController; @property (nonatomic) XMPPJID *myJID; - (NSString *)applicationDocumentsDirectory; - (NSData *)IPv4AddressFromAddresses:(NSArray *)addresses; - (NSString *)stringFromAddress:(NSData *)address; @end
{ "content_hash": "7661c4ebbf1039001665710f47484ef2", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 89, "avg_line_length": 29.225806451612904, "alnum_prop": 0.8222958057395143, "repo_name": "karanth/react-native-xmpp-mam", "id": "d07cb08e6cf47f76fdb798ab6841439df4ca736b", "size": "931", "binary": false, "copies": "8", "ref": "refs/heads/master", "path": "XMPPFramework/Xcode/ServerlessDemo/Classes/ServerlessDemoAppDelegate.h", "mode": "33261", "license": "bsd-2-clause", "language": [ { "name": "Java", "bytes": "426578" }, { "name": "JavaScript", "bytes": "14638" }, { "name": "Objective-C", "bytes": "2019061" }, { "name": "Python", "bytes": "1641" }, { "name": "Ruby", "bytes": "254" }, { "name": "Shell", "bytes": "66" } ] }
[![Join the chat at https://gitter.im/shiraji/new-instance-inspection](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/shiraji/new-instance-inspection?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![Android Arsenal](https://img.shields.io/badge/Android%20Arsenal-Fragment%20newInstance%20Inspection-brightgreen.svg?style=flat)](http://android-arsenal.com/details/1/2348)

This plugin adds an inspection that checks whether a Fragment implements the following method (if the class is MyFragment).

```MyFragment.java
public static MyFragment newInstance() {
    MyFragment fragment = new MyFragment();
    return fragment;
}
```

![screenshot](website/images/new_instance_inspection_screenshot.gif)

## Settings

To change settings, go to `Preferences > Editor > Inspections > Android > Fragment should implement newInstance()`

### Method name

The method name `newInstance` is configurable.

## Installation

Use the IDE's plugin manager to install the latest version of the plugin.

or

[Download](https://github.com/shiraji/new-instance-inspection/blob/master/new-instance-inspection.jar?raw=true) the jar file, then go to Preferences > Plugins > Install plugin from disk... > Select the jar file you downloaded

## Contributing

1. Fork it!
2. Create your feature branch: `git checkout -b my-new-feature`
3. Make sure you add gradle.jar to 'IntelliJ Plugin SDK'. `File > Project Structure... > SDKs > IntelliJ Plugin SDK > click + sign > Pick gradle.jar file from under IntelliJ IDEA's plugin directory`
4. Commit your changes: `git commit -am 'Add some feature'`
5. Push to the branch: `git push origin my-new-feature`
6. Submit a pull request

## License

```
Copyright 2015 Yoshinori Isogai

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ```
{ "content_hash": "d69f44941d52cb907b4482c93898b36b", "timestamp": "", "source": "github", "line_count": 55, "max_line_length": 413, "avg_line_length": 41.03636363636364, "alnum_prop": 0.768276473194506, "repo_name": "shiraji/new-instance-inspection", "id": "8f5a5349e552fdf15b098eddc7234a6149e46c2c", "size": "2292", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "119" }, { "name": "Java", "bytes": "7171" } ] }
namespace blink {

class ArrayBufferOrArrayBufferView;

// This class is for passing around un-owned bytes as a pointer + length.
// It supports implicit conversion from several other data types.
//
// ArrayPiece has the concept of being "null". This is different from an empty
// byte range. It is invalid to call methods other than isNull() on such
// instances.
//
// IMPORTANT: The data contained by ArrayPiece is NOT OWNED, so caution must be
// taken to ensure it is kept alive.
class CORE_EXPORT DOMArrayPiece : public WTF::ArrayPiece {
  DISALLOW_NEW();

 public:
  enum InitWithUnionOption {
    // Initialize this object as "null" when initialized with an union which
    // holds null.
    TreatNullAsNull,
    // Initialize this object so this points to null pointer with zero size
    // when initialized with an union which holds null.
    AllowNullPointToNullWithZeroSize,
  };

  // Constructs a "null" ArrayPiece (see class comment above).
  DOMArrayPiece() {}
  // Wraps |buffer|'s underlying buffer; does not take ownership.
  DOMArrayPiece(DOMArrayBuffer* buffer) : ArrayPiece(buffer->buffer()) {}
  // Wraps |view|'s underlying view; does not take ownership.
  DOMArrayPiece(DOMArrayBufferView* view) : ArrayPiece(view->view()) {}
  // Wraps whichever member the union holds; the option controls how a union
  // holding null is represented (see InitWithUnionOption).
  DOMArrayPiece(const ArrayBufferOrArrayBufferView&,
                InitWithUnionOption = TreatNullAsNull);

  // Byte-wise comparison against the buffer's full contents:
  // equal lengths and equal bytes.
  bool operator==(const DOMArrayBuffer& other) const {
    return byteLength() == other.byteLength() &&
           memcmp(data(), other.data(), byteLength()) == 0;
  }

  // Byte-wise comparison against the view's byte range (from its base
  // address): equal lengths and equal bytes.
  bool operator==(const DOMArrayBufferView& other) const {
    return byteLength() == other.byteLength() &&
           memcmp(data(), other.baseAddress(), byteLength()) == 0;
  }
};

}  // namespace blink

#endif  // DOMArrayPiece_h
{ "content_hash": "14bb3f03b1cd99e30d3f89f4079664d4", "timestamp": "", "source": "github", "line_count": 46, "max_line_length": 79, "avg_line_length": 34.43478260869565, "alnum_prop": 0.702020202020202, "repo_name": "google-ar/WebARonARCore", "id": "1fc00e4883d5346d66aca00b9afbbb1b8770666c", "size": "1919", "binary": false, "copies": "6", "ref": "refs/heads/webarcore_57.0.2987.5", "path": "third_party/WebKit/Source/core/dom/DOMArrayPiece.h", "mode": "33188", "license": "apache-2.0", "language": [] }
namespace Nancy.JsonPatch.OperationProcessor
{
    /// <summary>
    /// Result of executing a JSON Patch operation: a success flag together
    /// with a message describing the outcome.
    /// </summary>
    internal class JsonPatchOperationExecutorResult
    {
        /// <summary>
        /// Gets or sets a value indicating whether the operation succeeded.
        /// </summary>
        public bool Succeeded { get; set; }

        /// <summary>
        /// Gets or sets a message describing the result. May be null when no
        /// message has been set.
        /// </summary>
        public string Message { get; set; }
    }
}
{ "content_hash": "3cbbc86114d51528283baad8405f1b21", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 51, "avg_line_length": 25, "alnum_prop": 0.68, "repo_name": "DSaunders/Nancy.JsonPatch", "id": "379c51c03937a6f0ceddf0a11cc37cff3b1acc5a", "size": "200", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Nancy.JsonPatch/OperationProcessor/JsonPatchOperationExecutorResult.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "80676" } ] }
End of preview.

No dataset card yet

Downloads last month
2