text stringlengths 1 1.05M |
|---|
<reponame>bytecode-77/fastpix3d<gh_stars>10-100
#include "FastPix3D.h"
// --- static state shared by the rasterization routines ---
bool Drawer::IsStencilTriangle;    // true while a stencil pass is active (set by DrawStencilTriangle)
bool Drawer::IsLastTriangleCulled; // facing result of the most recently submitted triangle
// Per-pixel horizontal gradients (deltas) along a scanline, set up by
// DrawClippedTriangle and consumed by DrawScanline / DrawStencilScanline.
float Drawer::Dz;
float Drawer::Du;
float Drawer::Dv;
float Drawer::Dr;
float Drawer::Dg;
float Drawer::Db;
// The *16 variants are the same gradients scaled by one subdivision span
// (Subdiv1 pixels), used for the perspective-correction subdivision steps.
float Drawer::Dz16;
float Drawer::Du16;
float Drawer::Dv16;
float Drawer::Dr16;
float Drawer::Dg16;
float Drawer::Db16;
// Resets the drawer state; stencil mode is off by default.
void Drawer::Initialize()
{
	IsStencilTriangle = false;
}
// Transforms one triangle into camera space, performs backface culling,
// applies per-vertex lighting (skipped during a stencil pass), clips the
// triangle against the near plane and forwards the resulting triangle(s)
// to DrawClippedTriangle for rasterization.
void Drawer::DrawTriangle(Matrix modelSpace, Vertex vertex1, Vertex vertex2, Vertex vertex3)
{
	// combined model -> camera transformation
	Matrix worldSpace = modelSpace * RenderStates::CameraSpace;
	ProcessedVertex v1 = (ProcessedVertex)vertex1;
	ProcessedVertex v2 = (ProcessedVertex)vertex2;
	ProcessedVertex v3 = (ProcessedVertex)vertex3;
	v1.Position = worldSpace * v1.Position;
	v2.Position = worldSpace * v2.Position;
	v3.Position = worldSpace * v3.Position;
	// component-wise product of v1 with the edge cross product; the component
	// sum below is then effectively dot(v1.Position, faceNormal), whose sign
	// gives the triangle's facing (assumes operator* is component-wise —
	// TODO confirm against Vector3f)
	Vector3f normal = v1.Position * (v3.Position - v2.Position).CrossProduct(v2.Position - v1.Position);
	IsLastTriangleCulled = normal.X + normal.Y + normal.Z < 0;
	if (RenderStates::CullMode == CullMode::Back && IsLastTriangleCulled || RenderStates::CullMode == CullMode::Front && !IsLastTriangleCulled)
	{
		return;
	}
	// stencil triangles carry no color/texture data, so lighting is skipped
	if (!IsStencilTriangle)
	{
		// normals rotated (not translated) into camera space
		Vector3f vertex1Normals = worldSpace.MultiplyRotationOnly(v1.Normals);
		Vector3f vertex2Normals = worldSpace.MultiplyRotationOnly(v2.Normals);
		Vector3f vertex3Normals = worldSpace.MultiplyRotationOnly(v3.Normals);
		// per-vertex light accumulators start at the ambient color
		float r1 = float(Light::AmbientColor.R);
		float g1 = float(Light::AmbientColor.G);
		float b1 = float(Light::AmbientColor.B);
		float r2 = float(Light::AmbientColor.R);
		float g2 = float(Light::AmbientColor.G);
		float b2 = float(Light::AmbientColor.B);
		float r3 = float(Light::AmbientColor.R);
		float g3 = float(Light::AmbientColor.G);
		float b3 = float(Light::AmbientColor.B);
		if (RenderStates::EnableLights)
		{
			// fixed pool of up to 8 lights
			for (int32 i = 0; i < 8; i++)
			{
				Light light = *RenderStates::Lights[i];
				if (light.Enabled)
				{
					Vector3f lightVector, position, distance;
					float intensity, diffuse;
					switch (light.Type)
					{
						case LightType::Directional:
							// light direction: -Z rotated by the light's orientation, in camera space
							lightVector = RenderStates::CameraSpace.MultiplyRotationOnly(Matrix::RotateXYZ(light.Rotation) * Vector3f(0, 0, -1));
							// NOTE(review): DotProduct with itself is |lightVector|^2, which is 1
							// for a rotated unit vector — presumably intended as just light.Intensity
							intensity = light.Intensity * lightVector.DotProduct(lightVector);
							// Lambertian term per vertex; negative contributions are discarded
							if ((diffuse = lightVector.DotProduct(vertex1Normals) * intensity) > 0)
							{
								r1 += light.DiffuseColor.R * diffuse;
								g1 += light.DiffuseColor.G * diffuse;
								b1 += light.DiffuseColor.B * diffuse;
							}
							if ((diffuse = lightVector.DotProduct(vertex2Normals) * intensity) > 0)
							{
								r2 += light.DiffuseColor.R * diffuse;
								g2 += light.DiffuseColor.G * diffuse;
								b2 += light.DiffuseColor.B * diffuse;
							}
							if ((diffuse = lightVector.DotProduct(vertex3Normals) * intensity) > 0)
							{
								r3 += light.DiffuseColor.R * diffuse;
								g3 += light.DiffuseColor.G * diffuse;
								b3 += light.DiffuseColor.B * diffuse;
							}
							break;
						case LightType::Point:
							// point light: diffuse falls off with squared distance
							position = RenderStates::CameraSpace * light.Position;
							distance = position - v1.Position;
							if ((diffuse = distance.DotProduct(vertex1Normals) * light.Intensity / distance.DotProduct(distance)) > 0)
							{
								r1 += light.DiffuseColor.R * diffuse;
								g1 += light.DiffuseColor.G * diffuse;
								b1 += light.DiffuseColor.B * diffuse;
							}
							distance = position - v2.Position;
							if ((diffuse = distance.DotProduct(vertex2Normals) * light.Intensity / distance.DotProduct(distance)) > 0)
							{
								r2 += light.DiffuseColor.R * diffuse;
								g2 += light.DiffuseColor.G * diffuse;
								b2 += light.DiffuseColor.B * diffuse;
							}
							distance = position - v3.Position;
							if ((diffuse = distance.DotProduct(vertex3Normals) * light.Intensity / distance.DotProduct(distance)) > 0)
							{
								r3 += light.DiffuseColor.R * diffuse;
								g3 += light.DiffuseColor.G * diffuse;
								b3 += light.DiffuseColor.B * diffuse;
							}
							break;
						case LightType::Spot:
							// spot light: point-light falloff additionally attenuated by the
							// angle between the light direction and the vertex direction
							lightVector = RenderStates::CameraSpace.MultiplyRotationOnly(Matrix::RotateXYZ(light.Rotation) * Vector3f(0, 0, -1));
							position = RenderStates::CameraSpace * light.Position;
							distance = position - v1.Position;
							if ((diffuse = distance.DotProduct(vertex1Normals) * light.Intensity / distance.DotProduct(distance) * MathUtility::Interpolate(acos(distance.Normalize().DotProduct(lightVector)), -1, 1, 2, 0)) > 0)
							{
								r1 += light.DiffuseColor.R * diffuse;
								g1 += light.DiffuseColor.G * diffuse;
								b1 += light.DiffuseColor.B * diffuse;
							}
							distance = position - v2.Position;
							if ((diffuse = distance.DotProduct(vertex2Normals) * light.Intensity / distance.DotProduct(distance) * MathUtility::Interpolate(acos(distance.Normalize().DotProduct(lightVector)), -1, 1, 2, 0)) > 0)
							{
								r2 += light.DiffuseColor.R * diffuse;
								g2 += light.DiffuseColor.G * diffuse;
								b2 += light.DiffuseColor.B * diffuse;
							}
							distance = position - v3.Position;
							if ((diffuse = distance.DotProduct(vertex3Normals) * light.Intensity / distance.DotProduct(distance) * MathUtility::Interpolate(acos(distance.Normalize().DotProduct(lightVector)), -1, 1, 2, 0)) > 0)
							{
								r3 += light.DiffuseColor.R * diffuse;
								g3 += light.DiffuseColor.G * diffuse;
								b3 += light.DiffuseColor.B * diffuse;
							}
							break;
					}
				}
			}
		}
		else
		{
			// lighting disabled: full brightness
			r1 = g1 = b1 = 255;
			r2 = g2 = b2 = 255;
			r3 = g3 = b3 = 255;
		}
		// modulate the vertex colors, clamping each channel at full intensity
		v1.R *= r1 > 256 ? 1 : r1 / 256;
		v1.G *= g1 > 256 ? 1 : g1 / 256;
		v1.B *= b1 > 256 ? 1 : b1 / 256;
		v2.R *= r2 > 256 ? 1 : r2 / 256;
		v2.G *= g2 > 256 ? 1 : g2 / 256;
		v2.B *= b2 > 256 ? 1 : b2 / 256;
		v3.R *= r3 > 256 ? 1 : r3 / 256;
		v3.G *= g3 > 256 ? 1 : g3 / 256;
		v3.B *= b3 > 256 ? 1 : b3 / 256;
	}
	// scale positions so the near plane sits at Z == 1
	float d = 1 / RenderStates::ClipNear;
	v1.Position *= d;
	v2.Position *= d;
	v3.Position *= d;
	// a vertex is visible when it lies in front of the near plane
	bool vertex1Visible = v1.Position.Z > 1;
	bool vertex2Visible = v2.Position.Z > 1;
	bool vertex3Visible = v3.Position.Z > 1;
	if (vertex1Visible && vertex2Visible && vertex3Visible)
	{
		// fully in front of the near plane — no clipping needed
		DrawClippedTriangle(v1, v2, v3);
	}
	else if (vertex1Visible || vertex2Visible || vertex3Visible)
	{
		// partially clipped: compute the intersection of each edge (1-2, 2-3,
		// 3-1) with the near plane; only the two crossing edges are used below
		ProcessedVertex v12, v23, v31;
		v12.Position.X = MathUtility::Interpolate(1, v1.Position.Z, v2.Position.Z, v1.Position.X, v2.Position.X);
		v12.Position.Y = MathUtility::Interpolate(1, v1.Position.Z, v2.Position.Z, v1.Position.Y, v2.Position.Y);
		v12.Position.Z = 1;
		v23.Position.X = MathUtility::Interpolate(1, v2.Position.Z, v3.Position.Z, v2.Position.X, v3.Position.X);
		v23.Position.Y = MathUtility::Interpolate(1, v2.Position.Z, v3.Position.Z, v2.Position.Y, v3.Position.Y);
		v23.Position.Z = 1;
		v31.Position.X = MathUtility::Interpolate(1, v3.Position.Z, v1.Position.Z, v3.Position.X, v1.Position.X);
		v31.Position.Y = MathUtility::Interpolate(1, v3.Position.Z, v1.Position.Z, v3.Position.Y, v1.Position.Y);
		v31.Position.Z = 1;
		if (!IsStencilTriangle)
		{
			// interpolate texture coordinates and colors at the clip points, too
			v12.TextureCoordinates.X = MathUtility::Interpolate(1, v1.Position.Z, v2.Position.Z, v1.TextureCoordinates.X, v2.TextureCoordinates.X);
			v12.TextureCoordinates.Y = MathUtility::Interpolate(1, v1.Position.Z, v2.Position.Z, v1.TextureCoordinates.Y, v2.TextureCoordinates.Y);
			v12.R = MathUtility::Interpolate(1, v1.Position.Z, v2.Position.Z, v1.R, v2.R);
			v12.G = MathUtility::Interpolate(1, v1.Position.Z, v2.Position.Z, v1.G, v2.G);
			v12.B = MathUtility::Interpolate(1, v1.Position.Z, v2.Position.Z, v1.B, v2.B);
			v23.TextureCoordinates.X = MathUtility::Interpolate(1, v2.Position.Z, v3.Position.Z, v2.TextureCoordinates.X, v3.TextureCoordinates.X);
			v23.TextureCoordinates.Y = MathUtility::Interpolate(1, v2.Position.Z, v3.Position.Z, v2.TextureCoordinates.Y, v3.TextureCoordinates.Y);
			v23.R = MathUtility::Interpolate(1, v2.Position.Z, v3.Position.Z, v2.R, v3.R);
			v23.G = MathUtility::Interpolate(1, v2.Position.Z, v3.Position.Z, v2.G, v3.G);
			v23.B = MathUtility::Interpolate(1, v2.Position.Z, v3.Position.Z, v2.B, v3.B);
			v31.TextureCoordinates.X = MathUtility::Interpolate(1, v3.Position.Z, v1.Position.Z, v3.TextureCoordinates.X, v1.TextureCoordinates.X);
			v31.TextureCoordinates.Y = MathUtility::Interpolate(1, v3.Position.Z, v1.Position.Z, v3.TextureCoordinates.Y, v1.TextureCoordinates.Y);
			v31.R = MathUtility::Interpolate(1, v3.Position.Z, v1.Position.Z, v3.R, v1.R);
			v31.G = MathUtility::Interpolate(1, v3.Position.Z, v1.Position.Z, v3.G, v1.G);
			v31.B = MathUtility::Interpolate(1, v3.Position.Z, v1.Position.Z, v3.B, v1.B);
		}
		// two visible vertices -> the clipped region is a quad (two triangles);
		// one visible vertex -> a single smaller triangle
		if (vertex1Visible && vertex2Visible)
		{
			DrawClippedTriangle(v31, v1, v23);
			DrawClippedTriangle(v1, v2, v23);
		}
		else if (vertex2Visible && vertex3Visible)
		{
			DrawClippedTriangle(v3, v31, v2);
			DrawClippedTriangle(v12, v2, v31);
		}
		else if (vertex1Visible && vertex3Visible)
		{
			DrawClippedTriangle(v1, v12, v23);
			DrawClippedTriangle(v3, v1, v23);
		}
		else if (vertex1Visible)
		{
			DrawClippedTriangle(v1, v12, v31);
		}
		else if (vertex2Visible)
		{
			DrawClippedTriangle(v2, v23, v12);
		}
		else if (vertex3Visible)
		{
			DrawClippedTriangle(v3, v31, v23);
		}
	}
	// else: fully behind the near plane — nothing to draw
}
// Rasterizes a position-only triangle into the stencil buffer. The stencil
// flag is raised for the duration of the call so the shared triangle pipeline
// skips lighting/texturing and routes to the stencil scanline filler.
void Drawer::DrawStencilTriangle(Matrix modelSpace, Vector3f vertex1Position, Vector3f vertex2Position, Vector3f vertex3Position)
{
	IsStencilTriangle = true;
	DrawTriangle(modelSpace, Vertex(vertex1Position), Vertex(vertex2Position), Vertex(vertex3Position));
	IsStencilTriangle = false;
}
// Projects one near-plane-clipped, camera-space triangle to the screen and
// rasterizes it scanline by scanline: the stencil path writes only the
// stencil buffer, the normal path interpolates texture and color.
void Drawer::DrawClippedTriangle(ProcessedVertex vertex1, ProcessedVertex vertex2, ProcessedVertex vertex3)
{
	// perspective projection into pixel coordinates, centered on the screen
	int32 width2 = Device::Width >> 1, height2 = Device::Height >> 1;
	float d = width2 * RenderStates::Zoom;
	vertex1.ProjectedPosition.X = width2 + int32(vertex1.Position.X * d / vertex1.Position.Z);
	vertex1.ProjectedPosition.Y = height2 - int32(vertex1.Position.Y * d / vertex1.Position.Z);
	vertex2.ProjectedPosition.X = width2 + int32(vertex2.Position.X * d / vertex2.Position.Z);
	vertex2.ProjectedPosition.Y = height2 - int32(vertex2.Position.Y * d / vertex2.Position.Z);
	vertex3.ProjectedPosition.X = width2 + int32(vertex3.Position.X * d / vertex3.Position.Z);
	vertex3.ProjectedPosition.Y = height2 - int32(vertex3.Position.Y * d / vertex3.Position.Z);
	// trivial reject: the whole triangle lies off one side of the screen
	if (vertex1.ProjectedPosition.X < 0 && vertex2.ProjectedPosition.X < 0 && vertex3.ProjectedPosition.X < 0 ||
		vertex1.ProjectedPosition.Y < 0 && vertex2.ProjectedPosition.Y < 0 && vertex3.ProjectedPosition.Y < 0 ||
		vertex1.ProjectedPosition.X >= Device::Width && vertex2.ProjectedPosition.X >= Device::Width && vertex3.ProjectedPosition.X >= Device::Width ||
		vertex1.ProjectedPosition.Y >= Device::Height && vertex2.ProjectedPosition.Y >= Device::Height && vertex3.ProjectedPosition.Y >= Device::Height) return;
	// sort the vertices top to bottom by screen Y (3-element sorting network)
	if (vertex1.ProjectedPosition.Y > vertex2.ProjectedPosition.Y)
	{
		swap(vertex1, vertex2);
	}
	if (vertex2.ProjectedPosition.Y > vertex3.ProjectedPosition.Y)
	{
		swap(vertex2, vertex3);
		if (vertex1.ProjectedPosition.Y > vertex2.ProjectedPosition.Y)
		{
			swap(vertex1, vertex2);
		}
	}
	// from here on Position.Z holds 1/Z, which interpolates linearly in screen space
	vertex1.Position.Z = 1 / vertex1.Position.Z;
	vertex2.Position.Z = 1 / vertex2.Position.Z;
	vertex3.Position.Z = 1 / vertex3.Position.Z;
	if (!IsStencilTriangle)
	{
		// shift negative texture coordinates into positive range by a whole
		// number of tiles, so the bit-masked wrap in DrawScanline works
		if (vertex1.TextureCoordinates.X < 0 || vertex2.TextureCoordinates.X < 0 || vertex3.TextureCoordinates.X < 0)
		{
			float dif = floor(1 - min(vertex1.TextureCoordinates.X, min(vertex2.TextureCoordinates.X, vertex3.TextureCoordinates.X)));
			vertex1.TextureCoordinates.X += dif;
			vertex2.TextureCoordinates.X += dif;
			vertex3.TextureCoordinates.X += dif;
		}
		if (vertex1.TextureCoordinates.Y < 0 || vertex2.TextureCoordinates.Y < 0 || vertex3.TextureCoordinates.Y < 0)
		{
			float dif = floor(1 - min(vertex1.TextureCoordinates.Y, min(vertex2.TextureCoordinates.Y, vertex3.TextureCoordinates.Y)));
			vertex1.TextureCoordinates.Y += dif;
			vertex2.TextureCoordinates.Y += dif;
			vertex3.TextureCoordinates.Y += dif;
		}
		// pre-multiply texture coordinates (scaled to texels) and colors by
		// 1/Z for perspective-correct interpolation
		vertex1.TextureCoordinates.X *= RenderStates::CurrentTexture->Width * vertex1.Position.Z;
		vertex1.TextureCoordinates.Y *= RenderStates::CurrentTexture->Height * vertex1.Position.Z;
		vertex1.R *= vertex1.Position.Z;
		vertex1.G *= vertex1.Position.Z;
		vertex1.B *= vertex1.Position.Z;
		vertex2.TextureCoordinates.X *= RenderStates::CurrentTexture->Width * vertex2.Position.Z;
		vertex2.TextureCoordinates.Y *= RenderStates::CurrentTexture->Height * vertex2.Position.Z;
		vertex2.R *= vertex2.Position.Z;
		vertex2.G *= vertex2.Position.Z;
		vertex2.B *= vertex2.Position.Z;
		vertex3.TextureCoordinates.X *= RenderStates::CurrentTexture->Width * vertex3.Position.Z;
		vertex3.TextureCoordinates.Y *= RenderStates::CurrentTexture->Height * vertex3.Position.Z;
		vertex3.R *= vertex3.Position.Z;
		vertex3.G *= vertex3.Position.Z;
		vertex3.B *= vertex3.Position.Z;
	}
	// vertex Y coordinates clamped to the screen for the scanline loops
	int32 v1y = min(max(vertex1.ProjectedPosition.Y, 0), Device::Height - 1);
	int32 v2y = min(max(vertex2.ProjectedPosition.Y, 0), Device::Height - 1);
	int32 v3y = min(max(vertex3.ProjectedPosition.Y, 0), Device::Height - 1);
	// proceed only if at least one of the two triangle halves overlaps the screen
	if (vertex1.ProjectedPosition.Y < Device::Height && vertex2.ProjectedPosition.Y >= 0 || vertex2.ProjectedPosition.Y < Device::Height && vertex3.ProjectedPosition.Y >= 0)
	{
		// attributes of the long edge (1-3) evaluated at vertex2's height
		// NOTE(review): d divides by (v3.Y - v1.Y); a fully degenerate
		// (horizontal) triangle would divide by zero — presumably excluded
		// upstream, verify against callers
		float v13u, v13v, v13r, v13g, v13b;
		float d = (vertex2.ProjectedPosition.Y - vertex1.ProjectedPosition.Y) * 1.f / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y);
		float v13x = vertex1.ProjectedPosition.X + (vertex3.ProjectedPosition.X - vertex1.ProjectedPosition.X) * d;
		float v13z = vertex1.Position.Z + (vertex3.Position.Z - vertex1.Position.Z) * d;
		if (!IsStencilTriangle)
		{
			v13u = vertex1.TextureCoordinates.X + (vertex3.TextureCoordinates.X - vertex1.TextureCoordinates.X) * d;
			v13v = vertex1.TextureCoordinates.Y + (vertex3.TextureCoordinates.Y - vertex1.TextureCoordinates.Y) * d;
			v13r = vertex1.R + (vertex3.R - vertex1.R) * d;
			v13g = vertex1.G + (vertex3.G - vertex1.G) * d;
			v13b = vertex1.B + (vertex3.B - vertex1.B) * d;
		}
		// horizontal gradients across the triangle, shared by all scanlines
		d = 1.f / (v13x - vertex2.ProjectedPosition.X);
		Dz = (v13z - vertex2.Position.Z) * d;
		Dz16 = Dz * Subdiv1;
		if (IsStencilTriangle)
		{
			// upper half: edges 1-2 and 1-3
			if (vertex1.ProjectedPosition.Y < Device::Height && vertex2.ProjectedPosition.Y >= 0)
			{
				for (int32 y = v1y; y < v2y; y++)
				{
					DrawStencilScanline(
						y,
						vertex1.ProjectedPosition.X + (y - vertex1.ProjectedPosition.Y) * (vertex2.ProjectedPosition.X - vertex1.ProjectedPosition.X) / (vertex2.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.Position.Z + (y - vertex1.ProjectedPosition.Y) * (vertex2.Position.Z - vertex1.Position.Z) / (vertex2.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.ProjectedPosition.X + (y - vertex1.ProjectedPosition.Y) * (vertex3.ProjectedPosition.X - vertex1.ProjectedPosition.X) / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.Position.Z + (y - vertex1.ProjectedPosition.Y) * (vertex3.Position.Z - vertex1.Position.Z) / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y)
					);
				}
			}
			// lower half: edges 2-3 and 1-3
			if (vertex2.ProjectedPosition.Y < Device::Height && vertex3.ProjectedPosition.Y >= 0)
			{
				for (int32 y = v2y; y < v3y; y++)
				{
					DrawStencilScanline(
						y,
						vertex2.ProjectedPosition.X + (y - vertex2.ProjectedPosition.Y) * (vertex3.ProjectedPosition.X - vertex2.ProjectedPosition.X) / (vertex3.ProjectedPosition.Y - vertex2.ProjectedPosition.Y),
						vertex2.Position.Z + (y - vertex2.ProjectedPosition.Y) * (vertex3.Position.Z - vertex2.Position.Z) / (vertex3.ProjectedPosition.Y - vertex2.ProjectedPosition.Y),
						vertex1.ProjectedPosition.X + (y - vertex1.ProjectedPosition.Y) * (vertex3.ProjectedPosition.X - vertex1.ProjectedPosition.X) / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.Position.Z + (y - vertex1.ProjectedPosition.Y) * (vertex3.Position.Z - vertex1.Position.Z) / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y)
					);
				}
			}
		}
		else
		{
			// remaining attribute gradients plus their per-subdivision versions
			Du = (v13u - vertex2.TextureCoordinates.X) * d;
			Dv = (v13v - vertex2.TextureCoordinates.Y) * d;
			Dr = (v13r - vertex2.R) * d;
			Dg = (v13g - vertex2.G) * d;
			Db = (v13b - vertex2.B) * d;
			Du16 = Du * Subdiv1;
			Dv16 = Dv * Subdiv1;
			Dr16 = Dr * Subdiv1;
			Dg16 = Dg * Subdiv1;
			Db16 = Db * Subdiv1;
			// upper half: interpolate all attributes along edges 1-2 and 1-3
			if (vertex1.ProjectedPosition.Y < Device::Height && vertex2.ProjectedPosition.Y >= 0)
			{
				for (int32 y = v1y; y < v2y; y++)
				{
					DrawScanline(
						y,
						vertex1.ProjectedPosition.X + (y - vertex1.ProjectedPosition.Y) * (vertex2.ProjectedPosition.X - vertex1.ProjectedPosition.X) / (vertex2.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.Position.Z + (y - vertex1.ProjectedPosition.Y) * (vertex2.Position.Z - vertex1.Position.Z) / (vertex2.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.TextureCoordinates.X + (y - vertex1.ProjectedPosition.Y) * (vertex2.TextureCoordinates.X - vertex1.TextureCoordinates.X) / (vertex2.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.TextureCoordinates.Y + (y - vertex1.ProjectedPosition.Y) * (vertex2.TextureCoordinates.Y - vertex1.TextureCoordinates.Y) / (vertex2.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.R + (y - vertex1.ProjectedPosition.Y) * (vertex2.R - vertex1.R) / (vertex2.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.G + (y - vertex1.ProjectedPosition.Y) * (vertex2.G - vertex1.G) / (vertex2.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.B + (y - vertex1.ProjectedPosition.Y) * (vertex2.B - vertex1.B) / (vertex2.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.ProjectedPosition.X + (y - vertex1.ProjectedPosition.Y) * (vertex3.ProjectedPosition.X - vertex1.ProjectedPosition.X) / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.Position.Z + (y - vertex1.ProjectedPosition.Y) * (vertex3.Position.Z - vertex1.Position.Z) / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.TextureCoordinates.X + (y - vertex1.ProjectedPosition.Y) * (vertex3.TextureCoordinates.X - vertex1.TextureCoordinates.X) / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.TextureCoordinates.Y + (y - vertex1.ProjectedPosition.Y) * (vertex3.TextureCoordinates.Y - vertex1.TextureCoordinates.Y) / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.R + (y - vertex1.ProjectedPosition.Y) * (vertex3.R - vertex1.R) / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.G + (y - vertex1.ProjectedPosition.Y) * (vertex3.G - vertex1.G) / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.B + (y - vertex1.ProjectedPosition.Y) * (vertex3.B - vertex1.B) / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y)
					);
				}
			}
			// lower half: interpolate all attributes along edges 2-3 and 1-3
			if (vertex2.ProjectedPosition.Y < Device::Height && vertex3.ProjectedPosition.Y >= 0)
			{
				for (int32 y = v2y; y < v3y; y++)
				{
					DrawScanline(
						y,
						vertex2.ProjectedPosition.X + (y - vertex2.ProjectedPosition.Y) * (vertex3.ProjectedPosition.X - vertex2.ProjectedPosition.X) / (vertex3.ProjectedPosition.Y - vertex2.ProjectedPosition.Y),
						vertex2.Position.Z + (y - vertex2.ProjectedPosition.Y) * (vertex3.Position.Z - vertex2.Position.Z) / (vertex3.ProjectedPosition.Y - vertex2.ProjectedPosition.Y),
						vertex2.TextureCoordinates.X + (y - vertex2.ProjectedPosition.Y) * (vertex3.TextureCoordinates.X - vertex2.TextureCoordinates.X) / (vertex3.ProjectedPosition.Y - vertex2.ProjectedPosition.Y),
						vertex2.TextureCoordinates.Y + (y - vertex2.ProjectedPosition.Y) * (vertex3.TextureCoordinates.Y - vertex2.TextureCoordinates.Y) / (vertex3.ProjectedPosition.Y - vertex2.ProjectedPosition.Y),
						vertex2.R + (y - vertex2.ProjectedPosition.Y) * (vertex3.R - vertex2.R) / (vertex3.ProjectedPosition.Y - vertex2.ProjectedPosition.Y),
						vertex2.G + (y - vertex2.ProjectedPosition.Y) * (vertex3.G - vertex2.G) / (vertex3.ProjectedPosition.Y - vertex2.ProjectedPosition.Y),
						vertex2.B + (y - vertex2.ProjectedPosition.Y) * (vertex3.B - vertex2.B) / (vertex3.ProjectedPosition.Y - vertex2.ProjectedPosition.Y),
						vertex1.ProjectedPosition.X + (y - vertex1.ProjectedPosition.Y) * (vertex3.ProjectedPosition.X - vertex1.ProjectedPosition.X) / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.Position.Z + (y - vertex1.ProjectedPosition.Y) * (vertex3.Position.Z - vertex1.Position.Z) / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.TextureCoordinates.X + (y - vertex1.ProjectedPosition.Y) * (vertex3.TextureCoordinates.X - vertex1.TextureCoordinates.X) / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.TextureCoordinates.Y + (y - vertex1.ProjectedPosition.Y) * (vertex3.TextureCoordinates.Y - vertex1.TextureCoordinates.Y) / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.R + (y - vertex1.ProjectedPosition.Y) * (vertex3.R - vertex1.R) / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.G + (y - vertex1.ProjectedPosition.Y) * (vertex3.G - vertex1.G) / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y),
						vertex1.B + (y - vertex1.ProjectedPosition.Y) * (vertex3.B - vertex1.B) / (vertex3.ProjectedPosition.Y - vertex1.ProjectedPosition.Y)
					);
				}
			}
		}
	}
}
// Fills one textured, Gouraud-shaded scanline from (v1x, y) to (v2x, y).
// All attributes arrive pre-multiplied by 1/Z; the loop uses linear
// interpolation within fixed-size spans (Subdiv1 pixels) and re-divides by Z
// only at span boundaries — classic subdivision-based perspective correction.
void Drawer::DrawScanline(int32 y, int32 v1x, float v1z, float v1u, float v1v, float v1r, float v1g, float v1b, int32 v2x, float v2z, float v2u, float v2v, float v2r, float v2g, float v2b)
{
	// ensure left-to-right order
	if (v1x > v2x)
	{
		MathUtility::Swap(v1x, v2x);
		MathUtility::Swap(v1z, v2z);
		MathUtility::Swap(v1u, v2u);
		MathUtility::Swap(v1v, v2v);
		MathUtility::Swap(v1r, v2r);
		MathUtility::Swap(v1g, v2g);
		MathUtility::Swap(v1b, v2b);
	}
	// entirely off-screen?
	if (v2x < 0 || v1x >= Device::Width) return;
	// clip the left end, advancing all attributes to x == 0
	if (v1x < 0)
	{
		float d = v1x * 1.f / (v2x - v1x);
		v1z -= (v2z - v1z) * d;
		v1u -= (v2u - v1u) * d;
		v1v -= (v2v - v1v) * d;
		v1r -= (v2r - v1r) * d;
		v1g -= (v2g - v1g) * d;
		v1b -= (v2b - v1b) * d;
		v1x = 0;
	}
	// clip the right end
	if (v2x >= Device::Width) v2x = Device::Width - 1;
	int32 *backBuffer = &Device::BackBuffer[v1x + y * Device::Width];
	int16 *depthBuffer = &Device::DepthBuffer[v1x + y * Device::Width];
	sbyte *stencilBuffer = &Device::StencilBuffer[v1x + y * Device::Width];
	int32 *textureBuffer = RenderStates::CurrentTexture->Buffer;
	int32 textureWidthExponent = RenderStates::CurrentTexture->WidthExponent;
	float z = v1z, u = v1u, v = v1v, r = v1r, g = v1g, b = v1b;
	// masks for power-of-two texture wrap-around
	int32 tw = RenderStates::CurrentTexture->Width - 1;
	int32 th = RenderStates::CurrentTexture->Height - 1;
	int32 twe = RenderStates::CurrentTexture->WidthExponent;
	int32 subdivs = ((v2x - v1x) >> SubdivExponent) + 1;
	for (int32 i = 0; i < subdivs; i++)
	{
		// full span, except the last one which covers the remainder
		int32 pixels = i < subdivs - 1 ? Subdiv1 : (v2x - v1x) & SubdivModulo;
		// perspective-correct attribute values at both span ends, then a
		// linear per-pixel delta within the span
		float z1 = 1 / z, z2 = 1 / (z + Dz16);
		float su = u * z1;
		float sv = v * z1;
		float sr = r * z1;
		float sg = g * z1;
		float sb = b * z1;
		float du = ((u + Du16) * z2 - su) * InvertedSubdiv1;
		float dv = ((v + Dv16) * z2 - sv) * InvertedSubdiv1;
		float dr = ((r + Dr16) * z2 - sr) * InvertedSubdiv1;
		float dg = ((g + Dg16) * z2 - sg) * InvertedSubdiv1;
		float db = ((b + Db16) * z2 - sb) * InvertedSubdiv1;
		// NOTE(review): the inner loop variable shadows the outer subdivision
		// counter 'i' — harmless here, but easy to trip over when editing
		for (int32 i = 0; i < pixels; i++)
		{
			// z holds interpolated 1/Z; larger stored values appear to win the
			// depth test (*depthBuffer <= depth) — i.e. closer pixels overwrite
			int16 depth = int16(z * 65536);
			if (*depthBuffer <= depth && (!RenderStates::EnableStencilMask || *stencilBuffer))
			{
				// texel fetch with bit-masked wrap; the transparency key color
				// is skipped entirely (no color or depth write)
				byte *color = (byte*)&textureBuffer[(int32(su) & tw) | ((int32(sv) & th) << twe)];
				if (*(int32*)color != TextureTransparencyKey)
				{
					// modulate texel channels by interpolated vertex color
					*backBuffer = ((color[0] * int32(sr)) >> 8) << 16 | ((color[1] * int32(sg)) >> 8) << 8 | (color[2] * int32(sb)) >> 8;
					if (RenderStates::EnableZWrites) *depthBuffer = depth;
				}
			}
			backBuffer++;
			depthBuffer++;
			stencilBuffer++;
			z += Dz;
			su += du;
			sv += dv;
			sr += dr;
			sg += dg;
			sb += db;
		}
		// advance the 1/Z-space attributes by one whole span
		u += Du16;
		v += Dv16;
		r += Dr16;
		g += Dg16;
		b += Db16;
	}
}
// Rasterizes one scanline of a stencil (shadow) volume: wherever the volume
// fragment lies behind already-drawn geometry, the stencil counter is
// incremented for one facing and decremented for the other (depth-fail
// style counting; the facing comes from IsLastTriangleCulled).
void Drawer::DrawStencilScanline(int32 y, int32 v1x, float v1z, int32 v2x, float v2z)
{
	// ensure left-to-right order
	if (v1x > v2x)
	{
		MathUtility::Swap(v1x, v2x);
		MathUtility::Swap(v1z, v2z);
	}
	// entirely off-screen?
	if (v2x < 0 || v1x >= Device::Width) return;
	// clip the left end, advancing z to x == 0
	if (v1x < 0)
	{
		v1z -= (v2z - v1z) * v1x / (v2x - v1x);
		v1x = 0;
	}
	// clip the right end
	if (v2x >= Device::Width) v2x = Device::Width - 1;
	int16 *depthBuffer = &Device::DepthBuffer[v1x + y * Device::Width];
	sbyte *stencilBuffer = &Device::StencilBuffer[v1x + y * Device::Width];
	float z = v1z;
	// same span subdivision as DrawScanline, though only z is interpolated here
	int32 subdivs = ((v2x - v1x) >> SubdivExponent) + 1;
	for (int32 i = 0; i < subdivs; i++)
	{
		int32 pixels = i < subdivs - 1 ? Subdiv1 : (v2x - v1x) & SubdivModulo;
		for (int32 i = 0; i < pixels; i++)
		{
			// depth test inverted relative to DrawScanline: count only where
			// the stencil fragment is occluded by scene geometry
			if (*depthBuffer > z * 65536)
			{
				if (IsLastTriangleCulled)
				{
					(*stencilBuffer)++;
				}
				else
				{
					(*stencilBuffer)--;
				}
			}
			stencilBuffer++;
			depthBuffer++;
			z += Dz;
		}
	}
}
// Accessor for the facing/culling result of the most recently submitted triangle.
bool Drawer::getIsLastTriangleCulled()
{
	return IsLastTriangleCulled;
}
package com.softwaresandbox.patternshelloworld;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
@RestController()
public class HelloWorldController {

    /**
     * Greets the caller; the response is rendered by whichever style the
     * factory selects for the optional {@code style} query parameter.
     */
    @GetMapping(path = "/hello")
    public String getNameBold(@RequestParam(required = false) String style) {
        StyleFactory factory = new StyleFactory();
        return factory.create(style).printSelf();
    }
}
|
#!/bin/sh
# Watchdog: during one minute, checks every $step seconds whether the target
# Java process is still running and restarts it if not, logging each restart
# to a per-day file in the working directory.
#
# Args:
#   $1  process_flag      - pattern identifying the process in `ps` output
#   $2  working_directory - where to cd before starting; also holds the logs
#   $3  start_command     - command used to (re)start the process
#   $4  step (optional)   - check interval in seconds, accepted only in [5, 60]
source /etc/profile
process_flag=$1
working_directory=$2
start_command=$3
# max log file count
log_file_max=30
# interval of run
step=15
if [[ "$4" -ge 5 && "$4" -le 60 ]]; then
    step=$4
fi
for(( i = 0; i < 60; i=(i+step) )); do
    # judge process running
    count=$(ps -fC java | grep "$process_flag" | wc -l)
    if [ "$count" -eq 0 ]; then
        # BUGFIX: the timestamp was captured once at script start, so every
        # restart entry logged a stale time; capture it at restart time instead
        now=$(date "+%Y-%m-%d %H:%M:%S")
        echo "$now $process_flag restart by $start_command" >> "$working_directory/$(date "+%Y-%m-%d")_restart.out"
        # start
        cd "$working_directory"
        nohup $start_command 1>/dev/null 2>&1 &
        # clear oldest log file once the retention limit is exceeded
        # (date-prefixed names sort chronologically, so `head -n1` is the oldest)
        if [ "$(find "$working_directory" -name "*_restart.out" | wc -l)" -gt "$log_file_max" ]; then
            rm -rf "$(ls "$working_directory"/*_restart.out | head -n1)"
        fi
    fi
    # a full-minute step means a single check per invocation
    if [ "$step" -ge 60 ]; then
        exit 0
    else
        sleep "$step"
    fi
done
|
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
#ifndef _FA_DIGITIZER_T_H_
#define _FA_DIGITIZER_T_H_
#include "FAConfig.h"
#include "FARSDfaCA.h"
#include "FAState2OwCA.h"
#include "FAUtf32Utils.h"

#include <vector>
namespace BlingFire
{
class FAAllocatorA;
///
/// Assigns a digital value to the input chain according to the input Moore
/// automaton or assigns m_AnyOw.
///

template < class Ty >
class FADigitizer_t {

public:
    FADigitizer_t ();
    ~FADigitizer_t ();

public:
    /// sets up digitizer automaton
    void SetRsDfa (const FARSDfaCA * pDfa);
    /// sets up state reaction
    void SetState2Ow (const FAState2OwCA * pState2Ow);
    /// sets up AnyOtherIw, 0 by default
    void SetAnyIw (const int AnyIw);
    /// sets up AnyOtherOw, 0 by default
    void SetAnyOw (const int AnyOw);
    /// converts input symbols into lower case
    void SetIgnoreCase (const bool IgnoreCase);
    /// this method must be called once after all automata set up
    void Prepare ();
    /// returns Ow corresponding to the Chain
    const int Process (const Ty * pChain, const int Size) const;

private:
    /// frees the symbol --> Iw mapping table
    void Clear();
    /// returns m_AnyIw if Iw not in m_pSymbol2Iw
    inline const int Symbol2Iw (int Symbol) const;

private:
    int m_AnyIw;                      // Iw substituted for symbols outside the alphabet
    int m_AnyOw;                      // Ow returned when the automaton has no reaction
    const FAState2OwCA * m_pState2Ow; // state --> Ow reaction map (not owned)
    const FARSDfaCA * m_pDfa;         // digitizer Moore automaton (not owned)
    int m_MaxIw;                      // largest Iw in the automaton alphabet
    int * m_pSymbol2Iw;               // symbol --> Iw table of size m_MaxIw + 1 (owned)
    bool m_IgnoreCase;                // lower-case input symbols before lookup
};
// Default state: no automata attached, empty mapping table; SetRsDfa,
// SetState2Ow and Prepare must be called before Process can be used.
template < class Ty >
FADigitizer_t< Ty >::FADigitizer_t () :
    m_AnyIw (0),
    m_AnyOw (0),
    m_pState2Ow (NULL),
    m_pDfa (NULL),
    m_MaxIw (0),
    m_pSymbol2Iw (NULL),
    m_IgnoreCase (false)
{}

// Releases the symbol --> Iw table built by Prepare.
template < class Ty >
FADigitizer_t< Ty >::~FADigitizer_t ()
{
    FADigitizer_t< Ty >::Clear ();
}
// Releases the symbol --> Iw mapping table, if one was built.
// (delete [] on a null pointer is a no-op, so no guard is needed.)
template < class Ty >
void FADigitizer_t< Ty >::Clear ()
{
    delete [] m_pSymbol2Iw;
    m_pSymbol2Iw = NULL;
}
// Sets the digitizer Moore automaton (not owned by this object).
template < class Ty >
void FADigitizer_t< Ty >::SetRsDfa (const FARSDfaCA * pDfa)
{
    m_pDfa = pDfa;
}

// Sets the state --> Ow reaction map (not owned by this object).
template < class Ty >
void FADigitizer_t< Ty >::SetState2Ow (const FAState2OwCA * pState2Ow)
{
    m_pState2Ow = pState2Ow;
}

// Sets the input weight used for symbols outside the automaton alphabet.
template < class Ty >
void FADigitizer_t< Ty >::SetAnyIw (const int AnyIw)
{
    m_AnyIw = AnyIw;
}

// Sets the output weight returned when the automaton has no reaction.
template < class Ty >
void FADigitizer_t< Ty >::SetAnyOw (const int AnyOw)
{
    m_AnyOw = AnyOw;
}

// Enables/disables lower-casing of input symbols before lookup.
template < class Ty >
void FADigitizer_t< Ty >::SetIgnoreCase (const bool IgnoreCase)
{
    m_IgnoreCase = IgnoreCase;
}
// Builds the symbol --> Iw mapping table from the automaton's alphabet.
// Must be called once after SetRsDfa (and optionally the other setters);
// safe to call again to rebuild the table.
//
// Fix: the temporary Iws buffer was a raw new[]/delete[] pair, which leaked
// if GetIWs or an assertion threw in between; std::vector makes the cleanup
// exception-safe. The inline delete of m_pSymbol2Iw duplicated Clear().
template < class Ty >
void FADigitizer_t< Ty >::Prepare ()
{
    LogAssert (m_pDfa);

    // first call queries the alphabet size, second call fetches the Iws
    const int IwsCount = m_pDfa->GetIWs (NULL, 0);
    LogAssert (0 < IwsCount);

    std::vector< int > Iws (IwsCount);
    m_pDfa->GetIWs (Iws.data (), IwsCount);
    DebugLogAssert (FAIsSortUniqed (Iws.data (), IwsCount));

    // Iws are sorted, so the last one is the maximum
    m_MaxIw = Iws [IwsCount - 1];

    // drop any previously built table before (re)allocating
    Clear ();
    m_pSymbol2Iw = new int [m_MaxIw + 1];
    LogAssert (m_pSymbol2Iw);

    // by default every symbol maps to the AnyOther input weight
    for (int Iw = 0; Iw <= m_MaxIw; ++Iw) {
        m_pSymbol2Iw [Iw] = m_AnyIw;
    }
    // alphabet symbols map to themselves
    for (int iw_idx = 0; iw_idx < IwsCount; ++iw_idx) {
        const int Iw = Iws [iw_idx];
        m_pSymbol2Iw [Iw] = Iw;
    }
}
// Maps an input symbol to its input weight; symbols outside the table
// (negative after case folding, or above m_MaxIw) map to m_AnyIw.
template < class Ty >
inline const int FADigitizer_t< Ty >::Symbol2Iw (int Symbol) const
{
    DebugLogAssert (0 <= Symbol);

    // case folding happens before the table lookup
    if (m_IgnoreCase) {
        Symbol = FAUtf32ToLower (Symbol);
    }
    // out-of-range symbols are treated as AnyOther
    if (Symbol < 0 || m_MaxIw < Symbol) {
        return m_AnyIw;
    }
    return m_pSymbol2Iw [Symbol];
}
// Runs the Moore automaton over the chain and returns the reaction (Ow) of
// the state reached; returns m_AnyOw if a transition is missing or the final
// state has no reaction.
template < class Ty >
const int FADigitizer_t< Ty >::Process (const Ty * pChain, const int Size) const
{
    DebugLogAssert (m_pDfa);
    DebugLogAssert (m_pState2Ow);

    int State = m_pDfa->GetInitial ();

    for (int Pos = 0; Pos < Size; ++Pos) {
        DebugLogAssert (pChain);
        // input symbol --> integer --> input weight
        const int Iw = Symbol2Iw (int (pChain [Pos]));
        // follow the transition; a dead end means no digital value exists
        State = m_pDfa->GetDest (State, Iw);
        if (-1 == State) {
            return m_AnyOw;
        }
    }
    // the reaction of the reached state, or AnyOw if it has none
    const int Ow = m_pState2Ow->GetOw (State);
    return -1 == Ow ? m_AnyOw : Ow;
}
}
#endif
|
<filename>pecado-dp/pecado-dp-platform/src/main/java/me/batizhao/dp/mapper/CodeMetaMapper.java
package me.batizhao.dp.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import me.batizhao.dp.domain.CodeMeta;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
 * Mapper for the generated-code metadata table.
 *
 * @author batizhao
 * @since 2021-02-01
 */
@Mapper
public interface CodeMetaMapper extends BaseMapper<CodeMeta> {

    /**
     * Queries column information for a table.
     *
     * @param tableName the table name
     * @return metadata rows describing the table's columns
     */
    List<CodeMeta> selectColumnsByTableName(@Param("tableName") String tableName);
}
|
<filename>src/main/java/es/upv/grycap/tracer/model/TraceResource.java
package es.upv.grycap.tracer.model;
import java.io.Serializable;
import javax.persistence.Entity;
import javax.persistence.Id;
import com.sun.istack.NotNull;
import es.upv.grycap.tracer.model.dto.HashType;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
@Getter
@Setter
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Entity
public class TraceResource implements Serializable {

    private static final long serialVersionUID = -9144643513573832608L;

    /**
     * The ID of the file.
     * Must protect file information
     */
    @Id
    protected String id;

    /**
     * Base64 encoded String of the hash (presumably of the resource name,
     * judging by the field name — confirm against the producer).
     */
    protected String nameHash;

    /** Algorithm used to produce nameHash. */
    @NotNull
    protected HashType nameHashType;

    /**
     * Base64 encoded String of the hash (presumably of the resource path).
     */
    protected String pathHash;

    /** Algorithm used to produce pathHash. */
    @NotNull
    protected HashType pathHashType;

    /**
     * Base64 encoded String of the hash (presumably of the resource content).
     */
    protected String contentHash;

    /** Algorithm used to produce contentHash. */
    @NotNull
    protected HashType contentHashType;
}
|
<reponame>mpollicito/controlled-confusion
import React, { Component } from "react";
function Gameover() {
return (
<h1>
"Gameover" Component
</h1>
);
}
export default Gameover; |
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
 * Demonstrates flattening nested streams with {@code flatMap} and counting
 * token occurrences with {@code groupingBy}/{@code counting}.
 *
 * @author wuwangqiang
 * @version 2020/4/18 0018 15:18
 */
public class flapMap_group {
    public static void main(String[] args) {
        List<String> source = Arrays.asList("ab-cc-dd", "bb-cc", "cc-ab-dd", "dd-cc-ab");

        /* 1. mapping each element to String[] yields one array per element */
        List<String[]> tokenArrays = source.stream()
                .map(item -> item.split("-"))
                .collect(Collectors.toList());
        tokenArrays.forEach(tokens -> System.out.println(Arrays.toString(tokens)));

        /* 2. flatMap merges the per-element arrays into a single stream */
        List<String> flattened = tokenArrays.stream()
                .flatMap(Arrays::stream)
                .collect(Collectors.toList());
        System.out.println("--> 转换多个流为1个后:" + flattened);

        /* 3. group identical tokens and count their occurrences */
        Map<String, Long> tokenCounts = flattened.stream()
                .collect(Collectors.groupingBy(Function.identity(), Collectors.counting()));
        System.out.println(tokenCounts);

        /* 4. the same pipeline expressed as a single pass */
        Map<String, Long> onePass = source.stream()
                .map(item -> item.split("-"))
                .flatMap(Arrays::stream)
                .collect(Collectors.groupingBy(Function.identity(), Collectors.counting()));
        System.out.println("简化的结果:" + onePass);
    }
}
|
<reponame>chengweiou/leob<gh_stars>0
package chengweiou.universe.leob.controller.mg;
import chengweiou.universe.blackhole.exception.FailException;
import chengweiou.universe.blackhole.exception.ParamException;
import chengweiou.universe.blackhole.model.Builder;
import chengweiou.universe.blackhole.model.Rest;
import chengweiou.universe.blackhole.param.Valid;
import chengweiou.universe.leob.manager.FcmManager;
import chengweiou.universe.leob.model.Push;
import chengweiou.universe.leob.model.SearchCondition;
import chengweiou.universe.leob.model.entity.Device;
import chengweiou.universe.leob.service.device.DeviceService;
import com.google.firebase.messaging.Message;
import com.google.firebase.messaging.MulticastMessage;
import com.google.firebase.messaging.Notification;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
import java.util.stream.Collectors;
@RestController
@RequestMapping("mg")
public class PushController {
    @Autowired
    private FcmManager fcmManager;
    @Autowired
    private DeviceService deviceService;

    /**
     * Push a notification to every active device registered to the given person.
     *
     * @param e push payload: person, name (title) and content (body)
     * @return number of tokens FCM failed to deliver to
     * @throws ParamException when validation of the payload fails
     * @throws FailException  when the FCM send fails
     */
    @PostMapping("/push")
    public Rest<Long> push(Push e) throws ParamException, FailException {
        Valid.check("push.person", e.getPerson()).isNotNull();
        Valid.check("push.person.id", e.getPerson().getId()).is().positive();
        Valid.check("push.name", e.getName()).is().lengthIn(500);
        Valid.check("push.content", e.getContent()).is().lengthIn(500);
        List<Device> deviceList = deviceService.find(new SearchCondition(), Builder.set("person", e.getPerson()).to(new Device()));
        // Only deliver to devices still marked active.
        List<String> tokenList = deviceList.stream().filter(Device::getActive).map(Device::getToken).collect(Collectors.toList());
        long failCount = fcmManager.send(MulticastMessage.builder().addAllTokens(tokenList)
                .setNotification(Notification.builder().setTitle(e.getName()).setBody(e.getContent()).build()
                ).build());
        return Rest.ok(failCount);
    }

    /**
     * Push a notification to all subscribers of a topic.
     *
     * @param e push payload: topic, name (title) and content (body)
     * @return always 0L — topic sends report no per-device failure count
     * @throws ParamException when validation of the payload fails
     * @throws FailException  when the FCM send fails
     */
    @PostMapping("/push/topic")
    public Rest<Long> pushTopic(Push e) throws ParamException, FailException {
        Valid.check("push.topic", e.getTopic()).is().lengthIn(500);
        Valid.check("push.name", e.getName()).is().lengthIn(500);
        Valid.check("push.content", e.getContent()).is().lengthIn(500);
        // todo: subscription should only take effect when several conditions hold at once (paid + specific topic)
        // todo: devices subscribed to the same topic must not receive duplicate pushes
        // todo: decide how the `active` flag should be handled here
        fcmManager.send(Message.builder().setTopic(e.getTopic())
                .setNotification(Notification.builder().setTitle(e.getName()).setBody(e.getContent()).build()
                ).build());
        // BUG FIX: previously returned Rest.ok(true) — a Rest<Boolean> — from a method
        // declared Rest<Long>. Return 0L (no reported failures) to match the declared type.
        return Rest.ok(0L);
    }
}
|
# Given N integers, compute their average, rounded to three decimal places.
# Input: first line is N, followed by N integers (one per line).
sum=0
cnt=0
read -r total # first value: how many numbers follow
# $(( )) replaces the deprecated $[ ] arithmetic; note {1..$total} would NOT
# expand, which is why seq is used.
for i in $(seq 1 1 "$total")
do
    read -r num
    cnt=$((cnt + 1))
    sum=$((sum + num))
done
# Guard against an empty input set (division by zero in bc).
if [ "$cnt" -eq 0 ]; then
    echo 0.000
else
    out=$(echo "$sum/$cnt" | bc -l)
    # bc prints a bare "0" for a zero average; print the canonical form instead.
    if [[ $out == "0" ]]; then echo 0.000
    else printf %.3f "$out"
    fi
fi
// Returns true when `num` is a prime number.
// Primes are integers greater than 1 whose only divisors are 1 and themselves.
function isPrime(num) {
  if (num <= 1) {
    return false;
  }
  // PERF: trial division only up to sqrt(num) — a composite number must have
  // a factor no larger than its square root, so checking further is wasted work.
  for (let i = 2; i * i <= num; i++) {
    if (num % i === 0) {
      return false;
    }
  }
  return true;
}
// Quick manual check: classify one sample value and report the verdict.
let num = 7;
if (isPrime(num)) {
  console.log(num + ' is a prime number');
} else {
  console.log(num + ' is not a prime number');
}
#!/bin/sh
# Emit (to stdout) a DaemonSet manifest for a containerized Firefox, then
# append one hostAlias per cluster Service so "<service>.in" names resolve
# inside the pod without relying on cluster DNS.
# NOTE(review): YAML indentation below was reconstructed — the extracted copy
# had lost leading whitespace; verify against the original manifest.
echo "apiVersion: apps/v1
kind: DaemonSet
metadata:
  name: firefox
spec:
  selector:
    matchLabels:
      website: firefox
  template:
    metadata:
      labels:
        website: firefox
    spec:
      containers:
      - name: firefox
        image: jlesage/firefox
        imagePullPolicy: IfNotPresent
        ports:
        - containerPort: 5800
          hostPort: 32000
      hostAliases:"
# One alias entry per Service: ClusterIP plus the synthetic "<name>.in" host.
for svc in $(kubectl get svc -o custom-columns=:.metadata.name)
do
    ip=$(kubectl get svc $svc -o go-template={{.spec.clusterIP}})
    echo "      - ip: \"$ip\"
        hostnames:
        - \"$svc.in\""
done
import numpy as np
def find_leftover_ranks(all_ranks, ranklist):
    """Return, as a plain list, the ranks in ``all_ranks`` absent from ``ranklist``.

    The result is sorted and de-duplicated (set-difference semantics).
    """
    remaining = np.setdiff1d(all_ranks, ranklist)
    return remaining.tolist()
<filename>src/web/endpoint/message/delete.ts
import deleteMessage, { facts } from "../../api/methods/message/delete"
import { TurboServer } from "../../turbo"
import { getRemoteIpAddress } from "../../remoteIpAddress"
export default (server: TurboServer) => {
server.post(facts, async (req, res, params) => {
const remoteIpAddress = getRemoteIpAddress(req.headers)
const succeeded = await deleteMessage(
{
id: Math.trunc(req.body.id),
},
remoteIpAddress,
params["authUser"]
)
return {
ok: succeeded,
}
})
}
|
<reponame>infamousSs/zod<filename>zod-lib/http-fwk/http-fwk-core/src/test/java/com/infamous/framework/http/factory/ParameterHandler_PartTest.java
package com.infamous.framework.http.factory;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import com.infamous.framework.http.Part;
import com.infamous.framework.http.ZodHttpException;
import com.infamous.framework.http.core.BodyPart;
import com.infamous.framework.http.core.HttpRequest;
import com.infamous.framework.http.core.HttpRequestMultiPart;
import com.infamous.framework.http.core.HttpRequestWithBody;
import java.io.File;
import java.io.InputStream;
import java.util.Arrays;
import org.junit.jupiter.api.Test;
// Tests for ParameterHandler.Part: verifies how a @Part-annotated method
// argument is turned into multipart body parts for each kind of HttpRequest.
public class ParameterHandler_PartTest {
    @Test
    public void test_whenHttpDoesNotSupportPart() throws Exception {
        ParameterHandler.Part parameterHandler = new ParameterHandler.Part(getAnnotation());
        // A plain HttpRequest has no multipart support, so applying must throw.
        HttpRequest request = mock(HttpRequest.class);
        assertThrows(ZodHttpException.class, () -> parameterHandler.apply(request, "1234"));
    }
    @Test
    public void test_whenHttpRequestIsInstanceOfHttpRequestWithBody() throws Exception {
        HttpRequestWithBody request = testCaseFor(HttpRequestWithBody.class);
        // testCaseFor applies six representative value types; one part() call each.
        verify(request, times(6)).part(any(BodyPart.class));
        verifyNoMoreInteractions(request);
    }
    @Test
    public void test_whenValueIsCollection() throws Exception {
        // times[0] counts part() calls across both mocks; invoked[] records that
        // each element of the collection was actually seen with the right value.
        int[] times = {0};
        boolean[] invoked = new boolean[]{false, false};
        HttpRequestWithBody request = mockHttpRequest(HttpRequestWithBody.class);
        HttpRequestMultiPart requestMultiPart = mock(HttpRequestMultiPart.class);
        // First element goes to the body request, which hands back a multipart request.
        doAnswer((invocationOnMock -> {
            times[0]++;
            if (times[0] == 1) {
                BodyPart bodyPart = invocationOnMock.getArgument(0);
                assertEquals("files", bodyPart.getName());
                assertEquals("12", bodyPart.getValue());
                assertEquals("application/x-www-form-urlencoded", bodyPart.getContentType());
                invoked[0] = true;
                return requestMultiPart;
            }
            return null;
        })).when(request).part(any(BodyPart.class));
        // Remaining elements are appended to the multipart request returned above.
        doAnswer(invocationOnMock -> {
            times[0]++;
            if (times[0] == 2) {
                BodyPart bodyPart = invocationOnMock.getArgument(0);
                assertEquals("files", bodyPart.getName());
                assertEquals("123", bodyPart.getValue());
                assertEquals("application/x-www-form-urlencoded", bodyPart.getContentType());
                invoked[1] = true;
            }
            return null;
        }).when(requestMultiPart).part(any(BodyPart.class));
        ParameterHandler.Part parameterHandler = new ParameterHandler.Part(getAnnotation());
        apply(parameterHandler, request, Arrays.asList("12", "123"));
        assertEquals(times[0], 2);
        assertTrue(invoked[0]);
        assertTrue(invoked[1]);
    }
    @Test
    public void test_whenHttpRequestIsInstanceOfHttpRequestMultiPart() throws Exception {
        HttpRequestMultiPart request = testCaseFor(HttpRequestMultiPart.class);
        verify(request, times(6)).part(any(BodyPart.class));
        verifyNoMoreInteractions(request);
    }
    // Thin wrapper so the mocked request type reads clearly at call sites.
    private <T> T mockHttpRequest(Class<T> clazz) {
        return mock(clazz);
    }
    private HttpRequest apply(ParameterHandler.Part partHandler, HttpRequest request, Object data) throws Exception {
        return partHandler.apply(request, data);
    }
    // Applies six representative value types (String, int, byte[], InputStream,
    // File, BodyPart) to a fresh mock of the given request class and returns it.
    private <T> T testCaseFor(Class<T> clazz) throws Exception {
        ParameterHandler.Part parameterHandler = new ParameterHandler.Part(getAnnotation());
        T request = mockHttpRequest(clazz);
        apply(parameterHandler, (HttpRequest) request, "1234");
        apply(parameterHandler, (HttpRequest) request, 123);
        byte[] bytes = new byte[0];
        apply(parameterHandler, (HttpRequest) request, bytes);
        apply(parameterHandler, (HttpRequest) request, mock(InputStream.class));
        apply(parameterHandler, (HttpRequest) request, mock(File.class));
        apply(parameterHandler, (HttpRequest) request, mock(BodyPart.class));
        return request;
    }
    // Pulls a real @Part annotation instance off a known fixture method via reflection.
    private Part getAnnotation() throws NoSuchMethodException {
        return (Part) (RestClientTest.class.getMethod("testWithMultipartBody", String.class))
            .getParameterAnnotations()[0][0];
    }
}
|
<reponame>microdotblog/microblog-ios
//
// RFBookmarkController.h
// Micro.blog
//
// Created by <NAME> on 9/5/20.
// Copyright © 2020 Riverfold Software. All rights reserved.
//
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
// View controller for creating a bookmark from a URL.
@interface RFBookmarkController : UIViewController
// Text field where the user enters the bookmark URL (name-based — TODO confirm).
@property (strong, nonatomic) IBOutlet UITextField* urlField;
// NOTE(review): presumably shown while the bookmark request is in flight — confirm.
@property (strong, nonatomic) IBOutlet UIActivityIndicatorView* progressSpinner;
@end
NS_ASSUME_NONNULL_END
|
<reponame>NegriAndrea/pyxsim
"""
Answer test pyxsim.
"""
from pyxsim import \
ThermalSourceModel, \
PhotonList, merge_files, EventList
from yt.utilities.answer_testing.framework import requires_ds, \
GenericArrayTest, data_dir_load
from numpy.testing import assert_array_equal, \
assert_allclose
from numpy.random import RandomState
from yt.units.yt_array import uconcatenate
import os
import tempfile
import shutil
import astropy.io.fits as pyfits
def setup():
    """Mark the yt config as running inside the test harness."""
    from yt.config import ytcfg
    ytcfg["yt", "__withintesting"] = "True"

# Dataset path (relative to yt's answer-test data dir) exercised below.
gslr = "GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0300"
def return_data(data):
    """Build a one-argument callable that ignores its argument and returns ``data``.

    Adapts a captured array to the GenericArrayTest interface, which expects a
    function taking a name.
    """
    def _fetch(name):
        return data
    return _fetch
@requires_ds(gslr)
def test_sloshing():
    """End-to-end answer test: generate photons from the sloshing dataset,
    split/merge photon and event files, project events, and check that every
    round-tripped representation (HDF5, FITS) matches the original.

    Yields GenericArrayTest instances, so this runs as a nose test generator.
    NOTE: indentation reconstructed — the extracted copy had lost leading
    whitespace; statement order is unchanged.
    """
    # Work in a throwaway directory so the many intermediate files are isolated.
    tmpdir = tempfile.mkdtemp()
    curdir = os.getcwd()
    os.chdir(tmpdir)
    prng = RandomState(0x4d3d3d3)
    ds = data_dir_load(gslr)
    A = 2000.
    exp_time = 1.0e4
    redshift = 0.1
    sphere = ds.sphere("c", (0.1, "Mpc"))
    sphere.set_field_parameter("X_H", 0.75)
    thermal_model = ThermalSourceModel("apec", 0.1, 11.0, 10000, Zmet=0.3,
                                       thermal_broad=False, prng=prng)
    photons1 = PhotonList.from_data_source(sphere, redshift, A, exp_time,
                                           thermal_model)
    return_photons = return_data(photons1.photons)
    # Four quarter-exposure photon lists, merged, must preserve counts and
    # accumulate the exposure time.
    nphots = 0
    for i in range(4):
        phots = PhotonList.from_data_source(sphere, redshift, A, 0.25*exp_time,
                                            thermal_model)
        phots.write_h5_file("split_photons_%d.h5" % i)
        nphots += len(phots.photons["energy"])
    merge_files(["split_photons_%d.h5" % i for i in range(4)],
                "merged_photons.h5", add_exposure_times=True,
                overwrite=True)
    merged_photons = PhotonList.from_file("merged_photons.h5")
    assert len(merged_photons.photons["energy"]) == nphots
    assert merged_photons.parameters["fid_exp_time"] == exp_time
    events1 = photons1.project_photons([1.0,-0.5,0.2], [30., 45.],
                                       absorb_model="tbabs", nH=0.1, prng=prng)
    return_events = return_data(events1.events)
    events1.write_spectrum("test_events_spec.fits", 0.2, 10.0, 2000)
    f = pyfits.open("test_events_spec.fits")
    return_spec = return_data(f["SPECTRUM"].data["COUNTS"])
    f.close()
    events1.write_fits_image("test_events_img.fits", (20.0, "arcmin"),
                             1024)
    f = pyfits.open("test_events_img.fits")
    return_img = return_data(f[0].data)
    f.close()
    # Each captured array is compared against the stored answers.
    tests = [GenericArrayTest(ds, return_photons, args=["photons"]),
             GenericArrayTest(ds, return_events, args=["events"]),
             GenericArrayTest(ds, return_spec, args=["spec"]),
             GenericArrayTest(ds, return_img, args=["img"])]
    for test in tests:
        test_sloshing.__name__ = test.description
        yield test
    # Round-trip photons and events through HDF5 and FITS and compare.
    photons1.write_h5_file("test_photons.h5")
    events1.write_h5_file("test_events.h5")
    events1.write_fits_file("test_events.fits", 20.0, 1024)
    photons2 = PhotonList.from_file("test_photons.h5")
    events2 = EventList.from_h5_file("test_events.h5")
    events3 = EventList.from_fits_file("test_events.fits")
    for k in photons1.keys():
        if k == "energy":
            arr1 = uconcatenate(photons1[k])
            arr2 = uconcatenate(photons2[k])
        else:
            arr1 = photons1[k]
            arr2 = photons2[k]
        assert_array_equal(arr1, arr2)
    for k in events2.keys():
        assert_array_equal(events1[k], events2[k])
        # FITS storage is lossy at float precision; compare with a tolerance.
        assert_allclose(events2[k], events3[k], rtol=1.0e-6)
    # Same split/merge invariants for event files.
    nevents = 0
    for i in range(4):
        events = photons1.project_photons([1.0, -0.5, 0.2], [30., 45.],
                                          absorb_model="tbabs", nH=0.1,
                                          prng=prng)
        events.write_h5_file("split_events_%d.h5" % i)
        nevents += len(events["xsky"])
    merge_files(["split_events_%d.h5" % i for i in range(4)],
                "merged_events.h5", add_exposure_times=True,
                overwrite=True)
    merged_events = EventList.from_h5_file("merged_events.h5")
    assert len(merged_events["xsky"]) == nevents
    assert merged_events.parameters["exp_time"] == 4.0*exp_time
    os.chdir(curdir)
    shutil.rmtree(tmpdir)
|
#!/bin/bash
# shellcheck disable=SC2164
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Outline of this file.
# 0. Initialization and helper methods.
# 1. Installation of dependencies.
# 2. Installation of Go tools and vendored Go dependencies.
# 3. Detection of installed MySQL and setting MYSQL_FLAVOR.
# 4. Installation of development related steps e.g. creating Git hooks.
#
# 0. Initialization and helper methods.
#
# Run parallel make, based on number of cores available.
# Detect core count per-OS so make can run parallel jobs.
case $(uname) in
  Linux)  NB_CORES=$(grep -c '^processor' /proc/cpuinfo);;
  Darwin) NB_CORES=$(sysctl hw.ncpu | awk '{ print $2 }');;
esac
if [ -n "$NB_CORES" ]; then
  export MAKEFLAGS="-j$((NB_CORES+1)) -l${NB_CORES}"
fi
# Print an error message and abort the bootstrap.
function fail() {
  echo "ERROR: $1"
  exit 1
}
[[ "$(dirname "$0")" = "." ]] || fail "bootstrap.sh must be run from its current directory"
go version &>/dev/null || fail "Go is not installed or is not on \$PATH"
# Set up the proper GOPATH for go get below.
source ./dev.env
# Create main directories.
mkdir -p "$VTROOT/dist"
mkdir -p "$VTROOT/bin"
mkdir -p "$VTROOT/lib"
mkdir -p "$VTROOT/vthook"
# Set up required soft links.
# TODO(mberlin): Which of these can be deleted?
ln -snf "$VTTOP/config" "$VTROOT/config"
ln -snf "$VTTOP/data" "$VTROOT/data"
ln -snf "$VTTOP/py" "$VTROOT/py-vtdb"
ln -snf "$VTTOP/go/vt/zkctl/zksrv.sh" "$VTROOT/bin/zksrv.sh"
ln -snf "$VTTOP/test/vthook-test.sh" "$VTROOT/vthook/test.sh"
ln -snf "$VTTOP/test/vthook-test_backup_error" "$VTROOT/vthook/test_backup_error"
ln -snf "$VTTOP/test/vthook-test_backup_transform" "$VTROOT/vthook/test_backup_transform"
# install_dep is a helper function to generalize the download and installation of dependencies.
#
# If the installation is successful, it puts the installed version string into
# the $dist/.installed_version file. If the version has not changed, bootstrap
# will skip future installations.
# Args: name, version, dist dir, installer function (invoked with version+dist, CWD=$dist).
function install_dep() {
  if [[ $# != 4 ]]; then
    fail "install_dep function requires exactly 4 parameters (and not $#). Parameters: $*"
  fi
  local name="$1"
  local version="$2"
  local dist="$3"
  local install_func="$4"
  version_file="$dist/.installed_version"
  if [[ -f "$version_file" && "$(cat "$version_file")" == "$version" ]]; then
    echo "skipping $name install. remove $dist to force re-install."
    return
  fi
  echo "installing $name $version"
  # shellcheck disable=SC2064
  trap "fail '$name build failed'; exit 1" ERR
  # Cleanup any existing data and re-create the directory.
  rm -rf "$dist"
  mkdir -p "$dist"
  # Change $CWD to $dist before calling "install_func".
  pushd "$dist" >/dev/null
  # -E (same as "set -o errtrace") makes sure that "install_func" inherits the
  # trap. If there's an error, the trap will be called which will exit this
  # script.
  set -E
  $install_func "$version" "$dist"
  set +E
  popd >/dev/null
  trap - ERR
  echo "$version" > "$version_file"
}
#
# 1. Installation of dependencies.
#
# Each installer below is driven by install_dep: it receives (version, dist)
# and runs with $dist as the working directory.
#
# Install the gRPC Python library (grpcio) and the protobuf gRPC Python plugin (grpcio-tools) from PyPI.
# Dependencies like the Python protobuf package will be installed automatically.
function install_grpc() {
  local version="$1"
  local dist="$2"
  # Python requires a very recent version of virtualenv.
  # We also require a recent version of pip, as we use it to
  # upgrade the other tools.
  # For instance, setuptools doesn't work with pip 6.0:
  # https://github.com/pypa/setuptools/issues/945
  # (and setuptools is used by grpc install).
  grpc_virtualenv="$dist/usr/local"
  $VIRTUALENV -v "$grpc_virtualenv"
  PIP=$grpc_virtualenv/bin/pip
  $PIP install --upgrade pip
  $PIP install --upgrade --ignore-installed virtualenv
  grpcio_ver=$version
  $PIP install --upgrade grpcio=="$grpcio_ver" grpcio-tools=="$grpcio_ver"
}
install_dep "gRPC" "1.10.0" "$VTROOT/dist/grpc" install_grpc
# Install Zookeeper.
function install_zookeeper() {
  local version="$1"
  local dist="$2"
  zk="zookeeper-$version"
  wget "http://apache.org/dist/zookeeper/$zk/$zk.tar.gz"
  tar -xzf "$zk.tar.gz"
  mkdir -p lib
  cp "$zk/contrib/fatjar/$zk-fatjar.jar" lib
  # TODO(sougou): when version changes, see if we can drop the 'zip -d' hack to get the fatjars working.
  # If yes, also delete "zip" from the Dockerfile files and the manual build instructions again.
  # 3.4.10 workaround: Delete META-INF files which should not be in there.
  zip -d "lib/$zk-fatjar.jar" 'META-INF/*.SF' 'META-INF/*.RSA' 'META-INF/*SF'
  rm -rf "$zk" "$zk.tar.gz"
}
zk_ver=3.4.10
install_dep "Zookeeper" "$zk_ver" "$VTROOT/dist/vt-zookeeper-$zk_ver" install_zookeeper
# Download and install etcd, link etcd binary into our root.
function install_etcd() {
  local version="$1"
  local dist="$2"
  download_url=https://github.com/coreos/etcd/releases/download
  tar_file="etcd-${version}-linux-amd64.tar.gz"
  wget "$download_url/$version/$tar_file"
  tar xzf "$tar_file"
  rm "$tar_file"
  ln -snf "$dist/etcd-${version}-linux-amd64/etcd" "$VTROOT/bin/etcd"
}
install_dep "etcd" "v3.1.0-rc.1" "$VTROOT/dist/etcd" install_etcd
# Download and install consul, link consul binary into our root.
function install_consul() {
  local version="$1"
  local dist="$2"
  download_url=https://releases.hashicorp.com/consul
  wget "${download_url}/${version}/consul_${version}_linux_amd64.zip"
  unzip "consul_${version}_linux_amd64.zip"
  ln -snf "$dist/consul" "$VTROOT/bin/consul"
}
install_dep "Consul" "1.0.6" "$VTROOT/dist/consul" install_consul
# Install py-mock.
function install_pymock() {
  local version="$1"
  local dist="$2"
  # For some reason, it seems like setuptools won't create directories even with the --prefix argument
  mkdir -p lib/python2.7/site-packages
  PYTHONPATH=$(prepend_path "$PYTHONPATH" "$dist/lib/python2.7/site-packages")
  export PYTHONPATH
  pushd "$VTTOP/third_party/py" >/dev/null
  tar -xzf "mock-$version.tar.gz"
  cd "mock-$version"
  $PYTHON ./setup.py install --prefix="$dist"
  cd ..
  rm -r "mock-$version"
  popd >/dev/null
}
pymock_version=1.0.1
install_dep "py-mock" "$pymock_version" "$VTROOT/dist/py-mock-$pymock_version" install_pymock
# Download Selenium (necessary to run test/vtctld_web_test.py).
function install_selenium() {
  local version="$1"
  local dist="$2"
  $VIRTUALENV "$dist"
  PIP="$dist/bin/pip"
  # PYTHONPATH is removed for `pip install` because otherwise it can pick up go/dist/grpc/usr/local/lib/python2.7/site-packages
  # instead of go/dist/selenium/lib/python3.5/site-packages and then can't find module 'pip._vendor.requests'
  PYTHONPATH='' $PIP install selenium
}
install_dep "Selenium" "latest" "$VTROOT/dist/selenium" install_selenium
# Download chromedriver (necessary to run test/vtctld_web_test.py).
function install_chromedriver() {
  local version="$1"
  local dist="$2"
  curl -sL "http://chromedriver.storage.googleapis.com/$version/chromedriver_linux64.zip" > chromedriver_linux64.zip
  unzip -o -q chromedriver_linux64.zip -d "$dist"
  rm chromedriver_linux64.zip
}
install_dep "chromedriver" "2.25" "$VTROOT/dist/chromedriver" install_chromedriver
#
# 2. Installation of Go tools and vendored Go dependencies.
#
# Install third-party Go tools used as part of the development workflow.
#
# DO NOT ADD LIBRARY DEPENDENCIES HERE. Instead use govendor as described below.
#
# Note: We explicitly do not vendor the tools below because a) we want to stay
# on their latest version and b) it's easier to "go install" them this way.
gotools=" \
  github.com/golang/lint/golint \
  github.com/golang/mock/mockgen \
  github.com/kardianos/govendor \
  golang.org/x/tools/cmd/cover \
  golang.org/x/tools/cmd/goimports \
  golang.org/x/tools/cmd/goyacc \
  honnef.co/go/tools/cmd/unused \
"
echo "Installing dev tools with 'go get'..."
# $gotools is intentionally unquoted so each entry becomes a separate argument.
# shellcheck disable=SC2086
go get -u $gotools || fail "Failed to download some Go tools with 'go get'. Please re-run bootstrap.sh in case of transient errors."
# Download dependencies that are version-pinned via govendor.
#
# To add a new dependency, run:
#   govendor fetch <package_path>
#
# Existing dependencies can be updated to the latest version with 'fetch' as well.
#
# Then:
#   git add vendor/vendor.json
#   git commit
#
# See https://github.com/kardianos/govendor for more options.
echo "Updating govendor dependencies..."
govendor sync || fail "Failed to download/update dependencies with govendor. Please re-run bootstrap.sh in case of transient errors."
#
# 3. Detection of installed MySQL and setting MYSQL_FLAVOR.
#
# find mysql and prepare to use libmysqlclient
if [ -z "$MYSQL_FLAVOR" ]; then
  export MYSQL_FLAVOR=MySQL56
  echo "MYSQL_FLAVOR environment variable not set. Using default: $MYSQL_FLAVOR"
fi
# Validate the chosen flavor against the binary found under $VT_MYSQL_ROOT.
case "$MYSQL_FLAVOR" in
  "MySQL56")
    myversion="$("$VT_MYSQL_ROOT/bin/mysql" --version)"
    [[ "$myversion" =~ Distrib\ 5\.[67] ]] || fail "Couldn't find MySQL 5.6+ in $VT_MYSQL_ROOT. Set VT_MYSQL_ROOT to override search location."
    echo "Found MySQL 5.6+ installation in $VT_MYSQL_ROOT."
    ;;
  "MariaDB")
    myversion="$("$VT_MYSQL_ROOT/bin/mysql" --version)"
    [[ "$myversion" =~ MariaDB ]] || fail "Couldn't find MariaDB in $VT_MYSQL_ROOT. Set VT_MYSQL_ROOT to override search location."
    echo "Found MariaDB installation in $VT_MYSQL_ROOT."
    ;;
  *)
    fail "Unsupported MYSQL_FLAVOR $MYSQL_FLAVOR"
    ;;
esac
# save the flavor that was used in bootstrap, so it can be restored
# every time dev.env is sourced.
echo "$MYSQL_FLAVOR" > "$VTROOT/dist/MYSQL_FLAVOR"
#
# 4. Installation of development related steps e.g. creating Git hooks.
#
# Create the Git hooks: symlink the shared hooks into .git so they run on
# every commit in this checkout.
echo "creating git hooks"
mkdir -p "$VTTOP/.git/hooks"
ln -sf "$VTTOP/misc/git/pre-commit" "$VTTOP/.git/hooks/pre-commit"
ln -sf "$VTTOP/misc/git/prepare-commit-msg.bugnumber" "$VTTOP/.git/hooks/prepare-commit-msg"
ln -sf "$VTTOP/misc/git/commit-msg" "$VTTOP/.git/hooks/commit-msg"
(cd "$VTTOP" && git config core.hooksPath "$VTTOP/.git/hooks")
echo
echo "bootstrap finished - run 'source dev.env' in your shell before building."
|
// Pre-order depth-first walk: log the node's value, then recurse into children.
const traverse = (node) => {
  console.log(node.val);
  for (const child of node.children) {
    traverse(child);
  }
};
// Sample tree: root A with one child B, which has leaves C, D, E.
const tree = {
  val: 'A',
  children: [
    {
      val: 'B',
      children: [
        {
          val: 'C',
          children: []
        },
        {
          val: 'D',
          children: []
        },
        {
          val: 'E',
          children: []
        }
      ]
    }
  ]
};
traverse(tree);
// prints A, B, C, D, E
#! /usr/bin/env node
import { Command } from 'commander';
import generateConfig from './commands/generate-config';
import compound from './commands/compound';
import explorer from './commands/explorer';
// Assemble the CLI: register each sub-command, then parse process.argv.
const program = new Command();
program.version('1.0.0');

for (const command of [generateConfig, compound, explorer]) {
  program.addCommand(command);
}
program.parse();
|
import play.PlayScala
// sbt build definition for a Play Framework application.
name := """test"""
version := "1.0-SNAPSHOT"
// Single root project with the PlayScala plugin enabled.
lazy val root = (project in file(".")).enablePlugins(PlayScala)
scalaVersion := "2.11.1"
// Play-provided module aliases: JDBC access, Anorm SQL, cache, and WS client.
libraryDependencies ++= Seq(
  jdbc,
  anorm,
  cache,
  ws
)
libraryDependencies += "org.scalaj" %% "scalaj-http" % "1.1.0"
libraryDependencies += "io.spray" %% "spray-json" % "1.3.1"
|
fn push(&mut self, term: Term) -> Result<usize, String> {
let term_size = term.size(); // Assuming size() returns the memory size of the term
let available_space = self.fragment.available_space(); // Assuming available_space() returns the remaining space in the heap fragment
if term_size > available_space {
return Err("Insufficient memory to add the term to the heap".to_string());
}
self.terms.push(term);
Ok(self.terms.len() - 1) // Return the index at which the term was added
} |
const Promise = require('bluebird');
const request = require('request-promise');
const Elasticsearch = require('elasticsearch');
const esRequestBuilder = require('bodybuilder');
const moment = require('moment');
const _ = require('lodash');
const KubernetesService = require('./kubernetes.service');
const SwaggerService = require('./swagger.service');
const K8SUtils = require('../utils/kubernetes.utils');
const config = require('../../config/index');
// First host is used for raw REST calls below; the client balances across all.
const BASE_URLs = config.elasticSearch.hosts;
// ES document type and index-name prefix; full index is api-catalog-<env>.
const TYPE = 'api-catalog';
const CATALOG_BASE_INDEX = 'api-catalog-';
const REQUEST_SIZE = config.elasticSearch.requestSize;
const client = new Elasticsearch.Client({
    hosts: BASE_URLs,
    apiVersion: '2.4',
    log: 'warning'
});
exports.getServiceGroups = (env) => {
env = env || '*';
const options = {
uri: `${BASE_URLs[0]}/${CATALOG_BASE_INDEX}${env}/${TYPE}/_search`,
json: true,
body: {
size: 0,
aggregations: {
uniqueServiceGroups: {
terms: {
field: 'serviceGroup',
order: { '_term': 'asc' },
size: 0
}
}
}
}
};
return request.get(options)
.then(response => ({
total: _.get(response, 'hits.total', 0),
serviceGroups: _.get(response, 'aggregations.uniqueServiceGroups.buckets', [])
}));
};
// env = dev|qa|prod|*
// Free-text search over the API catalog, optionally restricted to one
// service group. Returns matching documents without the (large) spec field.
exports.searchAPICatalog = (env, serviceGroup, searchTerms) => {
    env = env || '*';
    const searchRequest = {
        index: `${CATALOG_BASE_INDEX}${env}`,
        type: TYPE,
        size: REQUEST_SIZE,
        _sourceExclude: ['spec', '@timestamp']
    };
    let bodyBuilder = esRequestBuilder();
    if (serviceGroup) {
        bodyBuilder = bodyBuilder.query('match', 'serviceGroup', serviceGroup);
    }
    if (searchTerms) {
        // Normalize "a, b" into wildcard terms that must ALL match (AND).
        const searchTermsRequest = searchTerms.replace(/\s*,\s*/g, ',').split(',').map(searchTerm => `*${searchTerm}*`).join(' AND ');
        bodyBuilder = bodyBuilder.query('query_string', {
            query: searchTermsRequest,
            fields: ['namespace', 'serviceGroup', 'serviceName', 'spec', 'repo.owner', 'repo.name']
        });
    }
    // sort Swaggers by serviceGroup, then serviceName, then namespace
    bodyBuilder = bodyBuilder.sort([
        {
            serviceGroup: {
                order: 'asc',
                ignore_unmapped: true
            }
        },
        {
            serviceName: {
                order: 'asc',
                ignore_unmapped: true
            }
        },
        {
            namespace: {
                order: 'asc',
                ignore_unmapped: true
            }
        }
    ]);
    searchRequest.body = bodyBuilder.build();
    // Wrap in a bluebird promise so .map() is available on the hit array.
    return Promise.resolve(client.search(searchRequest))
        .then(response => response.hits.hits)
        .map(hit => hit._source);
};
exports.writeApiEntries = () => {
return Promise.resolve(KubernetesService.getNamespaces())
.mapSeries(namespace => this.writeApiEntry(namespace));
};
// Build (or refresh) the catalog document for one namespace: gather Ingress
// labels and the Swagger spec, then upsert into ES — or delete the existing
// entry when the namespace no longer serves a Swagger file.
exports.writeApiEntry = (namespace) => {
    if (!namespace) {
        return;
    }
    const namespaceName = K8SUtils.extractNamespaceName(namespace);
    const repoOwner = K8SUtils.extractNamespaceRepoOwner(namespace);
    const repoName = K8SUtils.extractNamespaceRepoName(namespace);
    const repoBranch = K8SUtils.extractNamespaceRepoBranch(namespace);
    const env = K8SUtils.extractNamespaceEnvironment(namespaceName, repoBranch);
    console.log('===== Started processing Swagger for', namespaceName, ' =====');
    const entry = {
        // namespaceName is always unique, thus a good candidate for _id and avoid dupes
        id: `${TYPE}-${namespaceName}`,
        index: `${CATALOG_BASE_INDEX}${env}`,
        type: TYPE,
        timeout: '2s',
        body: {
            doc: {
                '@timestamp': moment.utc(),
                namespace: namespaceName,
                repo: {
                    owner: repoOwner,
                    name: repoName
                }
            },
            // upsert semantics: create the doc if missing, patch it otherwise
            doc_as_upsert: true
        }
    };
    return KubernetesService.getAllNamespaceIngressesServiceLabels(namespaceName)
        .then(labels => {
            // if we actually have labels
            if (labels) {
                const serviceGroup = labels['codekube.io/service.group'];
                const serviceName = labels['codekube.io/service.name'];
                if (serviceGroup) {
                    entry.body.doc.serviceGroup = serviceGroup;
                }
                if (serviceName) {
                    entry.body.doc.serviceName = serviceName;
                }
                console.log(`Got Ingress labels for ${namespaceName}, (${serviceGroup}, ${serviceName})`);
            }
        })
        // Missing labels are non-fatal; continue with the Swagger lookup.
        .catch(() => console.log('Unable to get Ingress labels for', namespaceName))
        .then(() => SwaggerService.getSwaggerFile(namespaceName))
        .then(swaggerJSON => {
            // on frontend, we don't return the spec (too big), but only basic info
            entry.body.doc.info = {
                title: _.get(swaggerJSON, 'info.title'),
                description: _.get(swaggerJSON, 'info.description')
            };
            // JSON.stringify, so we don't confuse ES with all the different Swagger mappings
            entry.body.doc.spec = JSON.stringify(swaggerJSON);
        })
        .then(() => {
            if (env && entry.body.doc.spec) {
                console.log('Got Swagger spec for', namespaceName);
                return client.update(entry)
                    .catch((error) => console.log('Error updating Swagger ES entry for', namespaceName, entry, error));
            } else {
                console.log(namespaceName, 'had no env or Swagger spec, skipping...');
            }
        })
        // if we DON'T have a Swagger file, make sure we delete it from ES if it was there
        .catch(() => {
            console.log('Error getting Swagger file for', namespaceName, ', deleting existing ES entry, if any.');
            return client.delete({ id: entry.id, type: entry.type, index: entry.index, timeout: '2s' })
                // if it's not found in ES, it throws an error,
                // so we catch it in order to continue the loop
                .then(() => console.log(`Deleted ${entry.id} from API Catalog in ES`))
                .catch(() => console.log(`Could not delete ${entry.id} from API Catalog in ES, erred or simply never existed`));
        })
        .then(() => console.log('===== Finished processing Swagger for', namespaceName, ' ====='));
};
|
#!/usr/bin/env bash
set -e
image="okd-4.1@sha256:67fe42feea8256f07069d776d4c4cecff6294ff8a5af67d719eca6c08548b45d"
source ${KUBEVIRTCI_PATH}/cluster/ephemeral-provider-common.sh
# Query the provider CLI for port mappings of this prefix's cluster container.
function _port() {
    ${_cli} ports --prefix $provider_prefix --container-name cluster "$@"
}
# Copy a file out of the cluster container into the provider's config dir.
# $1: source container id, $2: source path, $3: dest file mode, $4: dest name.
function _install_from_cluster() {
    local src_cid="$1"
    local src_file="$2"
    local dst_perms="$3"
    local dst_file="${KUBEVIRTCI_CONFIG_PATH}/$KUBEVIRT_PROVIDER/$4"
    touch $dst_file
    chmod $dst_perms $dst_file
    docker exec $src_cid cat $src_file > $dst_file
}
# Launch the OKD cluster: one control-plane node plus workers, then pull
# kubectl/oc and the kubeconfig out of the cluster container.
function up() {
    workers=$(($KUBEVIRT_NUM_NODES-1))
    # BUG FIX: the original used `[[ ( $workers < 1 ) ]]`, which performs a
    # lexicographic string comparison inside [[ ]]; use a numeric test so any
    # value below 1 (including negatives) reliably falls back to one worker.
    if [ "$workers" -lt 1 ]; then
        workers=1
    fi
    echo "Number of workers: $workers"
    params="--random-ports --background --prefix $provider_prefix --master-cpu 6 --workers-cpu 6 --workers-memory 8192 --secondary-nics ${KUBEVIRT_NUM_SECONDARY_NICS} --registry-volume $(_registry_volume) --workers $workers kubevirtci/${image}"
    if [[ ! -z "${RHEL_NFS_DIR}" ]]; then
        params=" --nfs-data $RHEL_NFS_DIR ${params}"
    fi
    if [[ ! -z "${OKD_CONSOLE_PORT}" ]]; then
        params=" --ocp-console-port $OKD_CONSOLE_PORT ${params}"
    fi
    ${_cli} run okd ${params}
    # Copy k8s config and kubectl
    cluster_container_id=$(docker ps -f "name=$provider_prefix-cluster" --format "{{.ID}}")
    _install_from_cluster $cluster_container_id /usr/local/bin/oc 0755 .kubectl
    _install_from_cluster $cluster_container_id /root/install/auth/kubeconfig 0644 .kubeconfig
    # Set server and disable tls check
    export KUBECONFIG=${KUBEVIRTCI_CONFIG_PATH}/$KUBEVIRT_PROVIDER/.kubeconfig
    ${KUBEVIRTCI_CONFIG_PATH}/$KUBEVIRT_PROVIDER/.kubectl config set-cluster test-1 --server=https://$(_main_ip):$(_port k8s)
    ${KUBEVIRTCI_CONFIG_PATH}/$KUBEVIRT_PROVIDER/.kubectl config set-cluster test-1 --insecure-skip-tls-verify=true
    # Make sure that local config is correct
    prepare_config
}
|
#!/bin/bash
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
set -e # Exit immediately when one of the commands fails.
set -x # Verbose
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
EXAMPLES_DIR="$(realpath "${SCRIPT_DIR}/../examples")"
PROJECT_EXT=".xcodeproj"
WORKSPACE_EXT=".xcworkspace"
# Keep a list of iOS apps which should be excluded from the CI builds.
SKIPPED_BUILDS="
gesture_classification/ios
"
# Build every non-test scheme of the example app located in directory $1.
function build_ios_example {
  # Check if this directory appears in the skipped builds list.
  RELATIVE_DIR="${1#"${EXAMPLES_DIR}/"}"
  if echo "${SKIPPED_BUILDS}" | grep -qx "${RELATIVE_DIR}"; then
    echo "WARNING: Skipping build for ${RELATIVE_DIR}."
    return 0
  fi
  echo "=== BUILD STARTED: ${RELATIVE_DIR} ==="
  pushd "$1" > /dev/null
  # Cleanly install the dependencies
  # Retry a few times to workaround intermittent download errors.
  MAX_RETRY=3
  INSTALLED=false
  for i in $(seq 1 ${MAX_RETRY})
  do
    echo "Trying to install dependencies... (trial $i)"
    if pod install --verbose --repo-update --clean-install; then
      INSTALLED=true
      break
    fi
  done
  if [[ "${INSTALLED}" == false ]]; then
    echo "Exceeded the max retry limit (${MAX_RETRY}) of pod install command."
    exit 1
  fi
  # Extract the scheme names.
  PROJECT_NAME="$(find * -maxdepth 0 -type d -name "*${PROJECT_EXT}")"
  WORKSPACE_NAME="$(find * -type d -name "*${WORKSPACE_EXT}")"
  SCHEMES="$(xcodebuild -list -project "${PROJECT_NAME}" -json | jq -r ".project.schemes[]")"
  # Build each scheme without code signing.
  for scheme in ${SCHEMES}; do
    # Due to an unknown issue prior to Xcode 11.4, a non-existing test scheme
    # might appear in the list of project schemes. For now, if a scheme name
    # contains the word "Tests", skip the build for that particular scheme.
    if [[ "${scheme}" == *"Tests"* ]]; then
      continue
    fi
    echo "--- BUILDING SCHEME ${scheme} FOR PROJECT ${RELATIVE_DIR} ---"
    set -o pipefail && xcodebuild \
      CODE_SIGN_IDENTITY="" \
      CODE_SIGNING_REQUIRED="NO" \
      CODE_SIGN_ENTITLEMENTS="" \
      CODE_SIGNING_ALLOWED="NO" \
      ARCHS="arm64" \
      -scheme "${scheme}" \
      -workspace "${WORKSPACE_NAME}" \
      | xcpretty # Pretty print the build output.
    echo "--- FINISHED BUILDING SCHEME ${scheme} FOR PROJECT ${RELATIVE_DIR} ---"
  done
  popd > /dev/null
  echo "=== BUILD FINISHED: ${RELATIVE_DIR} ==="
  echo
  echo
}
build_ios_example "$1"
|
<reponame>xxBeWolfxx/DroneProjectTello
import time
from tello import Tello
import cv2
import csv
import math
class TelloBird(Tello):
    """Tello drone that reads a mission name (plus one optional parameter)
    from a QR code seen by the camera and flies the matching routine.
    """

    # Current flight state; always one of listOfStates.
    currentState = "landed"
    # True while a mission is running; blocks new QR-triggered missions.
    statusOfMission = False
    listOfStates = ["landed", "in-air", "moving", "turning", "too-weak"]
    # Mission names as encoded in the QR codes; the list index is what
    # startMission() dispatches on.
    listOfMissions = ["basicMisssionL", "squareMissionL", "squareMissionT", "takeOffMission", "circleMissionL","eightMissionL","circleCurveMissionL", "test"]
    # Base delay in seconds between commands; scaled per call in sendingCommand().
    minimalTimeWaiting = 3.0

    def __init__(self):
        """Connect, land, read telemetry, and mark the drone "too-weak"
        when the battery is at or below 10%."""
        super().__init__()
        # OpenCV QR-code detector applied to frames from the drone camera.
        self.detector = cv2.QRCodeDetector()
        self.wait(self.minimalTimeWaiting)
        self.land()
        self.__getData__()
        self.bbox = None
        if self.battery <= 10:
            print("Too low battery, recharge: ", self.battery)
            self.currentState = self.listOfStates[-1]  # "too-weak"

    def watingForMission(self):
        """Poll the latest camera frame for a QR code; when one decodes to a
        known mission name, start that mission. No-op while a mission is
        already running. (Name keeps the original 'wating' spelling that
        callers use.)
        """
        if self.statusOfMission == True:
            return 0
        if self.last_frame is not None:
            data, self.bbox, straigt_qrcode = self.detector.detectAndDecode(self.last_frame)
            if self.bbox is not None:
                print(data)
                if data != "":
                    # QR payload format: "<missionName> [parameter]".
                    commands = self.__validadtionCommand__(data.split())
                    # NOTE(review): .index() raises ValueError for an unknown
                    # mission name -- confirm that crashing here is intended.
                    index = self.listOfMissions.index(commands[0])
                    self.statusOfMission = True
                    self.startMission(index, commands[1])
                    self.bbox = None

    def __endingMission__(self):
        # Re-enable QR-triggered missions.
        self.statusOfMission = False

    def __validadtionCommand__(self, commands):
        """Ensure the parsed QR payload has a parameter slot; pads with "None"."""
        if len(commands) > 1:
            return commands
        commands.append("None")
        return commands

    def __getData__(self):
        # Cache telemetry read from the drone.
        self.battery = self.get_battery()
        """TODO: Add more settings"""

    def EmergencyCall(self):
        """Dump the command log and stop the drone immediately."""
        print("Emergency call!!!")
        self.__saveLogs__()
        self.emergency()
        self.streamoff()
        self.land()

    def basicMisssionL(self) -> int:
        """Take off, then land. Returns 1 on success, 0 when not landed."""
        if self.currentState == self.listOfStates[0]:
            self.sendingCommand(self.takeoff(), 1)
            self.currentState = self.listOfStates[1]
            self.sendingCommand(self.land(), 1)
            self.currentState = self.listOfStates[0]
            self.__endingMission__()
            return 1
        else:
            print("I can't start a mission, I am flying")
            self.__endingMission__()
            return 0

    def test(self, distance):
        """Fly forward, turn 180 degrees, fly back, land."""
        if self.currentState == self.listOfStates[0]:
            self.takeoff()
            self.currentState = self.listOfStates[1]
            self.sendingCommand(self.forward(int(distance)), 2)
            self.sendingCommand(self.cw(180), 1)
            self.sendingCommand(self.forward(int(distance)),2)
            self.sendingCommand(self.land(),1)
            self.currentState = self.listOfStates[0]
            self.__endingMission__()
            return 1
        else:
            print("I can't start a mission, I am flying")
            self.__endingMission__()
            return 0

    def squareMissionL(self, distance):
        """Take off, fly a square of side `distance`, land."""
        if self.currentState == self.listOfStates[0]:
            self.sendingCommand(self.takeoff(),1)
            self.currentState = self.listOfStates[1]
            for i in range(4):
                # NOTE(review): unlike other missions, `distance` is not cast
                # with int() here -- confirm callers pass a numeric value.
                self.sendingCommand(self.forward(distance),2)
                self.sendingCommand(self.cw(90),1)
            self.sendingCommand(self.land(),1)
            # NOTE(review): state stays "in-air" after landing and
            # __endingMission__ is never called on the success path --
            # looks like a bug; confirm.
            self.currentState = self.listOfStates[1]
        else:
            print("Something went wrong mate :(")
            self.__endingMission__()

    def squareMissionT(self, distance):
        """Fly a square while already in the air, then land."""
        if self.currentState == self.listOfStates[1]:
            for i in range(4):
                self.sendingCommand(self.forward(int(distance)),1)
                self.sendingCommand(self.cw(90),1)
            self.sendingCommand(self.land(),1)
            # NOTE(review): sets "in-air" after landing -- likely should be
            # listOfStates[0] ("landed"); confirm.
            self.currentState = self.listOfStates[1]
            self.__endingMission__()
        else:
            print("Something went wrong mate :(")
            self.__endingMission__()

    def takeOffMission(self):
        """Take off and stay airborne."""
        if self.currentState == self.listOfStates[0]:
            self.sendingCommand(self.takeoff(),1)
            self.currentState = self.listOfStates[1]
            self.__endingMission__()
        else:
            return 0

    def circleMissionL(self, radius):
        """Take off, climb, fly a full circle (two half-circle curves), land.
        NOTE(review): currentState is never updated in this mission -- confirm."""
        if self.currentState == self.listOfStates[0]:
            radius = int(radius)
            self.sendingCommand(self.takeoff(), 1)
            self.sendingCommand(self.up(150), 1)
            # Scale so the curve's chord matches the requested radius.
            radius = int(radius * math.sqrt(2.0))
            self.sendingCommand(self.curve(radius,0,0,radius,radius,0,20), 1)
            self.sendingCommand(self.curve(-radius, 0, 0, -radius, -radius, 0, 20), 1)
            self.sendingCommand(self.land(),1)
            self.__endingMission__()
        else:
            self.__endingMission__()
            return 0

    def circleCurveMissionL(self, radius):
        """Like circleMissionL but with altitude changes along the curves."""
        if self.currentState == self.listOfStates[0]:
            radius = int(radius)
            self.sendingCommand(self.takeoff(), 1)
            radius = int(radius * math.sqrt(2.0))
            self.sendingCommand(self.curve(radius,0,40,radius,radius,60,20), 1)
            self.sendingCommand(self.curve(-radius, -40, 0, -radius, -radius, -60, 20), 1)
            self.sendingCommand(self.land(),1)
            self.__endingMission__()
        else:
            self.__endingMission__()
            return 0

    def eightMissionL(self, radius):
        """Take off, climb, fly a figure-eight (four half-circle curves), land."""
        if self.currentState == self.listOfStates[0]:
            radius = int(radius)
            self.sendingCommand(self.takeoff(), 1)
            self.sendingCommand(self.up(100), 1)
            radius = int(radius * math.sqrt(2.0))
            self.sendingCommand(self.curve(radius, 0, 0, radius, radius, 0, 30), 1.5)
            self.sendingCommand(self.curve(-radius, 0, 0, -radius, -radius, 0, 30), 1.5)
            self.sendingCommand(self.curve(-radius, 0, 0, -radius, radius, 0, 30), 1.5)
            self.sendingCommand(self.curve(radius, 0, 0, radius, -radius, 0, 30), 1.5)
            self.sendingCommand(self.land(), 1)
            self.__endingMission__()
        else:
            self.__endingMission__()
            return 0

    def startMission(self, chooseMission: int, parametr):
        """Dispatch a mission by its index in listOfMissions after a short
        settle delay. `parametr` is the optional QR parameter (or "None")."""
        if chooseMission == 0:
            self.wait(int(self.minimalTimeWaiting * 0.75))
            self.basicMisssionL()
        elif chooseMission == 1:
            self.wait(int(self.minimalTimeWaiting * 0.75))
            self.squareMissionL(parametr)
        elif chooseMission == 2:
            self.wait(int(self.minimalTimeWaiting * 0.75))
            self.squareMissionT(parametr)
        elif chooseMission == 3:
            self.wait(int(self.minimalTimeWaiting * 0.75))
            self.takeOffMission()
        elif chooseMission == 4:
            self.wait(int(self.minimalTimeWaiting * 0.75))
            self.circleMissionL(parametr)
        elif chooseMission == 5:
            self.wait(int(self.minimalTimeWaiting * 0.75))
            self.eightMissionL(parametr)
        elif chooseMission ==6:
            self.wait(int(self.minimalTimeWaiting * 0.75))
            self.circleCurveMissionL(parametr)
        elif chooseMission == 7:
            self.wait(int(self.minimalTimeWaiting * 0.75))
            self.test(parametr)

    def sendingCommand(self, command, scaleTime):
        # NOTE(review): `command` is the *result* of the drone call -- Python
        # evaluates the argument at the call site, so the command has already
        # been sent before this wait happens, and the bare `command` statement
        # below is a no-op. To actually delay before sending, callers would
        # have to pass a callable instead. TODO confirm intent.
        self.wait(int(self.minimalTimeWaiting * scaleTime))
        command

    def __saveLogs__(self):
        """Write the recorded command log to logs.csv."""
        with open('logs.csv', 'w') as file:
            writer = csv.writer(file)
            writer.writerow(['Id', 'Command', 'Response', 'Start_time', 'End_time', 'Duration'])
            for item in self.log:
                writer.writerow([item.id, item.command, item.response, item.start_time, item.end_time, item.duration])
|
#include <stdio.h>
#include "z_libpd.h"
/* Print hook for libpd: forwards pd's console output verbatim to stdout. */
void pdprint(const char *s) {
  fputs(s, stdout);
}
/* MIDI note-on hook for libpd: logs channel, pitch, and velocity. */
void pdnoteon(int ch, int pitch, int vel) {
  fprintf(stdout, "noteon: %d %d %d\n", ch, pitch, vel);
}
int main(int argc, char **argv) {
  // Usage: <prog> <patch-file> <patch-folder>
  if (argc < 3) {
    fprintf(stderr, "usage: %s file folder\n", argv[0]);
    return -1;
  }

  // init pd
  int srate = 44100;
  libpd_set_printhook(pdprint);
  libpd_set_noteonhook(pdnoteon);
  libpd_init();
  libpd_init_audio(1, 2, srate);
  float inbuf[64], outbuf[128]; // one input channel, two output channels
  // block size 64, one tick per buffer

  // compute audio [; pd dsp 1( -- i.e. send "dsp 1" to pd to switch DSP on
  libpd_start_message(1); // one entry in list
  libpd_add_float(1.0f);
  libpd_finish_message("pd", "dsp");

  // open patch [; pd open file folder(
  libpd_openfile(argv[1], argv[2]);

  // now run pd for ten seconds (logical time)
  int i;
  for (i = 0; i < 10 * srate / 64; i++) {
    // fill inbuf here
    // NOTE(review): inbuf is never written, so pd processes uninitialized
    // samples; zero it (or fill it) before feeding real input.
    libpd_process_float(1, inbuf, outbuf);
    // use outbuf here
  }

  // Dump the first ten output samples of the final tick.
  for (i = 0; i < 10; i++)
    printf("%g\n", outbuf[i]);
  return 0;
}
|
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.services.javascript;
import org.apache.tapestry5.json.JSONObject;
/**
* Used to change the configuration object which will be used to
* <a href="http://requirejs.org/docs/api.html#config">configure RequireJS</a>; callbacks can modify
* and override the configuration after it was created
* by the {@link ModuleManager} service based on contributed {@link JavaScriptModuleConfiguration}s.
* This allows components, pages, mixins and services to configure Require.JS dynamically in a
* per-request basis by using the
* {@link JavaScriptSupport#addModuleConfigurationCallback(ModuleConfigurationCallback)} method.
*
* Note that RequireJS is only configured during a full page render; on Ajax requests, RequireJS
* will already be loaded and configured.
*
*
* @see JavaScriptSupport#addModuleConfigurationCallback(ModuleConfigurationCallback)
* @since 5.4
*/
public interface ModuleConfigurationCallback
{
    /**
     * Receives the current RequireJS configuration, which can be copied or returned, or
     * (more typically) modified in place and returned.
     *
     * @param configuration
     *         a {@link JSONObject} containing the current configuration.
     * @return a {@link JSONObject} containing the changed configuration, most probably the same
     *         instance received as a parameter.
     */
    JSONObject configure(JSONObject configuration);
}
|
#!/bin/bash
# Print guidance for unsupported OS/dist combinations, then abort the script.
unknown_os ()
{
  cat <<'EOS'
Unfortunately, your operating system distribution and version are not supported by this script.

You can override the OS detection by setting os= and dist= prior to running this script.
You can find a list of supported OSes and distributions on our website: https://packages.gitlab.com/docs#os_distro_version

For example, to force CentOS 6: os=el dist=6 ./script.sh

Please email support@packagecloud.io and let us know if you run into any issues.
EOS
  exit 1
}
# Ensure curl is available, installing it quietly via yum when missing.
curl_check ()
{
  echo "Checking for curl..."
  if ! command -v curl > /dev/null; then
    echo "Installing curl..."
    yum install -d0 -e0 -y curl
  else
    echo "Detected curl..."
  fi
}
# Populate $os and $dist, honoring user overrides (os=/dist= environment
# variables). Falls back through: /etc/os-release, lsb_release, Oracle,
# Fedora and RedHat release files, then Amazon Linux via /etc/issue;
# calls unknown_os (which exits) when nothing matches.
detect_os ()
{
  if [[ ( -z "${os}" ) && ( -z "${dist}" ) ]]; then
    if [ -e /etc/os-release ]; then
      . /etc/os-release
      os=${ID}
      # poky/sles/opensuse keep the full VERSION_ID; everything else keeps
      # only the major version number.
      if [ "${os}" = "poky" ]; then
        dist=`echo ${VERSION_ID}`
      elif [ "${os}" = "sles" ]; then
        dist=`echo ${VERSION_ID}`
      elif [ "${os}" = "opensuse" ]; then
        dist=`echo ${VERSION_ID}`
      elif [ "${os}" = "opensuse-leap" ]; then
        os=opensuse
        dist=`echo ${VERSION_ID}`
      else
        dist=`echo ${VERSION_ID} | awk -F '.' '{ print $1 }'`
      fi
    elif [ `which lsb_release 2>/dev/null` ]; then
      # get major version (e.g. '5' or '6')
      dist=`lsb_release -r | cut -f2 | awk -F '.' '{ print $1 }'`
      # get os (e.g. 'centos', 'redhatenterpriseserver', etc)
      os=`lsb_release -i | cut -f2 | awk '{ print tolower($1) }'`
    elif [ -e /etc/oracle-release ]; then
      dist=`cut -f5 --delimiter=' ' /etc/oracle-release | awk -F '.' '{ print $1 }'`
      os='ol'
    elif [ -e /etc/fedora-release ]; then
      dist=`cut -f3 --delimiter=' ' /etc/fedora-release`
      os='fedora'
    elif [ -e /etc/redhat-release ]; then
      os_hint=`cat /etc/redhat-release | awk '{ print tolower($1) }'`
      if [ "${os_hint}" = "centos" ]; then
        dist=`cat /etc/redhat-release | awk '{ print $3 }' | awk -F '.' '{ print $1 }'`
        os='centos'
      elif [ "${os_hint}" = "scientific" ]; then
        dist=`cat /etc/redhat-release | awk '{ print $4 }' | awk -F '.' '{ print $1 }'`
        os='scientific'
      else
        dist=`cat /etc/redhat-release | awk '{ print tolower($7) }' | cut -f1 --delimiter='.'`
        os='redhatenterpriseserver'
      fi
    else
      # Last resort: Amazon Linux advertises itself in /etc/issue.
      aws=`grep -q Amazon /etc/issue`
      if [ "$?" = "0" ]; then
        dist='6'
        os='aws'
      else
        unknown_os
      fi
    fi
  fi

  if [[ ( -z "${os}" ) || ( -z "${dist}" ) ]]; then
    unknown_os
  fi

  # remove whitespace from OS and dist name
  os="${os// /}"
  dist="${dist// /}"

  echo "Detected operating system as ${os}/${dist}."

  # pygpgme is unavailable on EL8-era distros; remember to skip it later.
  if [ "${dist}" = "8" ]; then
    _skip_pygpgme=1
  else
    _skip_pygpgme=0
  fi
}
# Install pygpgme (unless skipped for EL8) and yum-utils, then prime the yum
# caches for the gitlab_gitlab-ce and gitlab_gitlab-ce-source repositories.
# Both installs disable the gitlab repo itself so a broken repo config can't
# block them; failures degrade to warnings rather than aborting.
finalize_yum_repo ()
{
  if [ "$_skip_pygpgme" = 0 ]; then
    echo "Installing pygpgme to verify GPG signatures..."
    yum install -y pygpgme --disablerepo='gitlab_gitlab-ce'
    # After `var=$(cmd)`, $? holds the exit status of the command
    # substitution, i.e. of `grep -qw` here.
    pypgpme_check=`rpm -qa | grep -qw pygpgme`
    if [ "$?" != "0" ]; then
      echo
      echo "WARNING: "
      echo "The pygpgme package could not be installed. This means GPG verification is not possible for any RPM installed on your system. "
      # Typo fix in the user-facing message: "Usualy" -> "Usually".
      echo "To fix this, add a repository with pygpgme. Usually, the EPEL repository for your system will have this. "
      echo "More information: https://fedoraproject.org/wiki/EPEL#How_can_I_use_these_extra_packages.3F"
      echo
      # set the repo_gpgcheck option to 0
      sed -i'' 's/repo_gpgcheck=1/repo_gpgcheck=0/' /etc/yum.repos.d/gitlab_gitlab-ce.repo
    fi
  fi

  echo "Installing yum-utils..."
  yum install -y yum-utils --disablerepo='gitlab_gitlab-ce'
  yum_utils_check=`rpm -qa | grep -qw yum-utils`
  if [ "$?" != "0" ]; then
    echo
    echo "WARNING: "
    echo "The yum-utils package could not be installed. This means you may not be able to install source RPMs or use other yum features."
    echo
  fi

  echo "Generating yum cache for gitlab_gitlab-ce..."
  yum -q makecache -y --disablerepo='*' --enablerepo='gitlab_gitlab-ce'

  echo "Generating yum cache for gitlab_gitlab-ce-source..."
  yum -q makecache -y --disablerepo='*' --enablerepo='gitlab_gitlab-ce-source'
}
# Refresh both gitlab zypper repositories, auto-importing their GPG keys.
finalize_zypper_repo ()
{
  local repo
  for repo in gitlab_gitlab-ce gitlab_gitlab-ce-source; do
    zypper --gpg-auto-import-keys refresh "$repo"
  done
}
# Detect the OS, download the matching gitlab-ce repo config from
# packagecloud, install it, and finalize the repo (yum or zypper).
main ()
{
  detect_os
  curl_check

  yum_repo_config_url="https://packages.gitlab.com/install/repositories/gitlab/gitlab-ce/config_file.repo?os=${os}&dist=${dist}&source=script"

  # SUSE-family distros use zypper's repo directory; everyone else uses yum's.
  if [ "${os}" = "sles" ] || [ "${os}" = "opensuse" ]; then
    yum_repo_path=/etc/zypp/repos.d/gitlab_gitlab-ce.repo
  else
    yum_repo_path=/etc/yum.repos.d/gitlab_gitlab-ce.repo
  fi

  echo "Downloading repository file: ${yum_repo_config_url}"

  # -f makes curl return distinct exit codes on HTTP/TLS failures, which are
  # inspected below (22 = HTTP error, 35/60 = TLS problems).
  curl -sSf "${yum_repo_config_url}" > $yum_repo_path
  curl_exit_code=$?

  if [ "$curl_exit_code" = "22" ]; then
    echo
    echo
    echo -n "Unable to download repo config from: "
    echo "${yum_repo_config_url}"
    echo
    echo "This usually happens if your operating system is not supported by "
    echo "packagecloud.io, or this script's OS detection failed."
    echo
    echo "You can override the OS detection by setting os= and dist= prior to running this script."
    echo "You can find a list of supported OSes and distributions on our website: https://packages.gitlab.com/docs#os_distro_version"
    echo
    echo "For example, to force CentOS 6: os=el dist=6 ./script.sh"
    echo
    echo "If you are running a supported OS, please email support@packagecloud.io and report this."
    # Remove the partial/empty repo file left by the failed download.
    [ -e $yum_repo_path ] && rm $yum_repo_path
    exit 1
  elif [ "$curl_exit_code" = "35" -o "$curl_exit_code" = "60" ]; then
    echo
    echo "curl is unable to connect to packagecloud.io over TLS when running: "
    echo "    curl ${yum_repo_config_url}"
    echo
    echo "This is usually due to one of two things:"
    echo
    echo " 1.) Missing CA root certificates (make sure the ca-certificates package is installed)"
    echo " 2.) An old version of libssl. Try upgrading libssl on your system to a more recent version"
    echo
    echo "Contact support@packagecloud.io with information about your system for help."
    [ -e $yum_repo_path ] && rm $yum_repo_path
    exit 1
  elif [ "$curl_exit_code" -gt "0" ]; then
    echo
    echo "Unable to run: "
    echo "    curl ${yum_repo_config_url}"
    echo
    echo "Double check your curl installation and try again."
    [ -e $yum_repo_path ] && rm $yum_repo_path
    exit 1
  else
    echo "done."
  fi

  if [ "${os}" = "sles" ] || [ "${os}" = "opensuse" ]; then
    finalize_zypper_repo
  else
    finalize_yum_repo
  fi

  echo
  echo "The repository is setup! You can now install packages."
}

main
|
<gh_stars>0
package weixin.member.entity;
import java.math.BigDecimal;
import java.util.Date;
import java.lang.String;
import java.lang.Double;
import java.lang.Integer;
import java.math.BigDecimal;
import javax.xml.soap.Text;
import java.sql.Blob;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import org.hibernate.annotations.GenericGenerator;
import javax.persistence.SequenceGenerator;
import org.jeecgframework.poi.excel.annotation.Excel;
/**
* @Title: Entity
* @Description: 微信组
* @author onlineGenerator
* @date 2015-01-16 16:17:44
* @version V1.0
*
*/
@Entity
@Table(name = "weixin_group", schema = "")
@SuppressWarnings("serial")
public class WeixinGroupEntity implements java.io.Serializable {
    /** Primary key (generated UUID). */
    private java.lang.String id;
    /** Name of the user who created this record. */
    private java.lang.String createName;
    /** Creation date. */
    private java.util.Date createDate;
    /** WeChat group number. */
    @Excel(exportName="分组编号")
    private java.lang.Integer groupId;
    /** Group name. */
    @Excel(exportName="分组名称")
    private java.lang.String groupName;
    /** Synchronization status. */
    @Excel(exportName="同步状态")
    private java.lang.String synchStatu;
    /** Number of users in the group. (The original comment mislabeled this
     * field as the group number; the export name says "user count".) */
    @Excel(exportName="用户数量")
    private java.lang.Integer count;
    /** WeChat account id. */
    @Excel(exportName="微信Id")
    private java.lang.String accountid;

    /** @return the primary key (UUID) */
    @Id
    @GeneratedValue(generator = "paymentableGenerator")
    @GenericGenerator(name = "paymentableGenerator", strategy = "uuid")
    @Column(name ="ID",nullable=false,length=36)
    public java.lang.String getId(){
        return this.id;
    }

    /** @param id the primary key (UUID) */
    public void setId(java.lang.String id){
        this.id = id;
    }

    /** @return the creator's name */
    @Column(name ="CREATE_NAME",nullable=true,length=50)
    public java.lang.String getCreateName(){
        return this.createName;
    }

    /** @param createName the creator's name */
    public void setCreateName(java.lang.String createName){
        this.createName = createName;
    }

    /** @return the creation date */
    @Column(name ="CREATE_DATE",nullable=true,length=20)
    public java.util.Date getCreateDate(){
        return this.createDate;
    }

    /** @param createDate the creation date */
    public void setCreateDate(java.util.Date createDate){
        this.createDate = createDate;
    }

    /** @return the WeChat group number */
    @Column(name ="GROUP_ID",nullable=true,length=32)
    public java.lang.Integer getGroupId(){
        return this.groupId;
    }

    /** @param groupId the WeChat group number */
    public void setGroupId(java.lang.Integer groupId){
        this.groupId = groupId;
    }

    /** @return the group name */
    @Column(name ="GROUP_NAME",nullable=true,length=32)
    public java.lang.String getGroupName(){
        return this.groupName;
    }

    /** @param groupName the group name */
    public void setGroupName(java.lang.String groupName){
        this.groupName = groupName;
    }

    /** @return the synchronization status */
    @Column(name ="SYNCH_STATU",nullable=true,length=32)
    public java.lang.String getSynchStatu(){
        return this.synchStatu;
    }

    /** @param synchStatu the synchronization status */
    public void setSynchStatu(java.lang.String synchStatu){
        this.synchStatu = synchStatu;
    }

    /** @return the WeChat account id */
    @Column(name ="ACCOUNTID",nullable=true,length=32)
    public java.lang.String getAccountid(){
        return this.accountid;
    }

    /** @param accountid the WeChat account id */
    public void setAccountid(java.lang.String accountid){
        this.accountid = accountid;
    }

    /** @return the number of users in the group */
    @Column(name ="COUNT",nullable=true,length=8)
    public java.lang.Integer getCount() {
        return count;
    }

    /** @param count the number of users in the group */
    public void setCount(java.lang.Integer count) {
        this.count = count;
    }
}
|
import { Injectable } from '@angular/core';
import { HttpClient, HttpHeaders } from '@angular/common/http';
import { Observable } from 'rxjs';
@Injectable({
  providedIn: 'root'
})
export class UserService {
  // Backend base URL; all endpoints below are relative to it.
  baseurl = "https://hoodappx255.herokuapp.com";
  // JSON content-type header shared by the GET/POST calls below.
  httpHeaders = new HttpHeaders({'Content-Type': 'application/json'});

  constructor(private http: HttpClient) { }

  // Fetches the user list.
  // NOTE(review): this GETs '/auth/login/', which looks like an auth endpoint
  // rather than a user collection -- verify against the backend routes.
  getAllUsers(): Observable<any> {
    return this.http.get(this.baseurl + '/auth/login/',
      {headers: this.httpHeaders}
    );
  }

  // Fetches a single user by id.
  getOneUser(id): Observable<any> {
    return this.http.get(this.baseurl + '/api/v1/users/' + id,
      {headers: this.httpHeaders}
    );
  }

  // Updates a user.
  // NOTE(review): the body sends empty username/email instead of values from
  // `user`, and no JSON headers are set -- confirm this is intentional.
  updateUser(user): Observable<any> {
    const body = {username: '' , email: ''};
    return this.http.put(this.baseurl + '/users/' + user.id + '/', body);
  }

  // Registers a new user.
  AddUser(user): Observable<any> {
    return this.http.post(this.baseurl + '/auth/signup/', user,
      {headers: this.httpHeaders})
  }
}
import type { Document, DocumentPOJO } from "./document";
/**
 * A shipping label (POJO form, extending {@link DocumentPOJO}).
 */
export interface LabelPOJO extends DocumentPOJO {
  /**
   * The **actual** reference fields on the label, which may not match the originally-specified
   * reference fields due to the carrier's restrictions on the number of fields or the length
   * of each field.
   */
  referenceFields?: string[];
}
/**
 * A shipping label (read-only form, extending {@link Document}).
 */
export interface Label extends Document {
  /**
   * The **actual** reference fields on the label, which may not match the originally-specified
   * reference fields due to the carrier's restrictions on the number of fields or the length
   * of each field.
   */
  readonly referenceFields: ReadonlyArray<string>;
}
|
<gh_stars>1-10
import sgMail from '@sendgrid/mail';
import { config, logger } from '../config';
/**
 * Sends one email through SendGrid.
 * @param to recipient address
 * @param from sender address
 * @param subject subject line
 * @param text plain-text body
 * @param html HTML body
 * NOTE(review): send() is not awaited, so this async function resolves before
 * delivery finishes and failures are only logged by the .catch below --
 * confirm fire-and-forget is intended.
 */
const sendEmail = async (to: string, from: string, subject: string, text: string, html: string) => {
  // using Twilio SendGrid's v3 Node.js Library
  // https://github.com/sendgrid/sendgrid-nodejs
  sgMail.setApiKey(config.email.sendgridAPIKey);
  const msg = {
    to,
    from,
    subject,
    text,
    html,
  };
  sgMail.send(msg)
    .then(() => {
      logger.info(`📧 Email Sent to ${to}`);
    })
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    .catch((error: any) => {
      logger.warn(error);
    });
};
const sendResetPasswordEmail = async (to: string, newPassword: string) => {
const subject = '🔐 Reset password';
const text = `Dear user,
Here your new password: ${newPassword}
If you did not request any password resets, then please let us now.`;
const html = `<p>Dear user,
Here your new password: <b>${<PASSWORD>}</b>
If you did not request any password resets, then please let us now.<p>`;
await sendEmail(to, 'nodeApp', subject, text, html);
};
/**
 * Sends an account-verification email containing a tokenized link.
 * @param to recipient address
 * @param token verification token embedded in the link
 */
const sendVerificationEmail = async (to: string, token: string) => {
  // TODO: -- replace this url with the link to the email verification service back-end app
  const verifyUrl = `${config.host}:${config.port}/api/auth/verify-email/${token}`;
  const mailSubject = 'Email Verification';
  const plainBody = `Dear user,
To verify your email, click on this link: ${verifyUrl}
If you did not create an account, then ignore this email.`;
  const htmlBody = `<p>Dear user,
To verify your email, click <a href="${verifyUrl}">Here</a> <br>
If you did not create an account, then ignore this email.`;
  await sendEmail(to, 'nodeApp', mailSubject, plainBody, htmlBody);
};
// Public email-service API exposed to the rest of the app.
export default {
  sendEmail,
  sendResetPasswordEmail,
  sendVerificationEmail,
};
|
// Prompts for a side length and alerts the square's area.
function square() {
    const side = prompt("Enter the length of one side");
    if (side === "") {
        alert("No value entered.");
        return;
    }
    if (isNaN(side)) {
        alert("The value of side should be a number.");
        return;
    }
    const area = Math.pow(side, 2);
    alert("Area of the square is " + area + " square units.");
}
/**
 * Prompts for length and width and alerts the rectangle's area.
 * Fix: the original only rejected the input when BOTH values were
 * non-numeric (&&); a single bad value slipped through and produced NaN.
 */
function rectangle() {
    let length = prompt("Enter the value of length");
    let width = prompt("Enter the value of width");
    if (length != "" && width != "") {
        // Reject if EITHER value is not a number.
        if (isNaN(length) || isNaN(width)) {
            alert("Values should be numbers");
        } else {
            let area = length * width;
            alert("Area of the rectangle is " + area + " square units.");
        }
    } else {
        alert("Provide both values.")
    }
}
// Prompts for a radius and alerts the circle's area, rounded to an integer.
function circle() {
    const radius = prompt("Enter the value of radius");
    if (radius === "") {
        alert("No value entered.");
        return;
    }
    if (isNaN(radius)) {
        alert("The value of radius should be a number.");
        return;
    }
    let area = Math.PI * Math.pow(radius, 2);
    area = Math.round(area);
    alert("Area of the circle is " + area + " square units.");
}
/**
 * Prompts for length and height and alerts the rhombus's area.
 * Fix: require BOTH inputs to be numeric; the original only rejected the
 * input when both were non-numeric (&&), letting NaN through.
 */
function rhombus() {
    let length = prompt("Enter the value of length");
    let height = prompt("Enter the value of height");
    if (length != "" && height != "") {
        if (isNaN(length) || isNaN(height)) {
            alert("Values should be numbers");
        } else {
            let area = length * height;
            alert("Area of the rhombus is " + area + " square units.");
        }
    } else {
        alert("Provide both values.")
    }
}
/**
 * Prompts for base and height and alerts the triangle's area (base*height/2).
 * Fix: require BOTH inputs to be numeric; the original only rejected the
 * input when both were non-numeric (&&), letting NaN through.
 */
function triangle() {
    let base = prompt("Enter the value of base");
    let height = prompt("Enter the value of height");
    if (base != "" && height != "") {
        if (isNaN(base) || isNaN(height)) {
            alert("Values should be numbers");
        } else {
            let area = 0.5 * (base * height);
            alert("Area of the triangle is " + area + " square units.");
        }
    } else {
        alert("Provide both values.")
    }
}
/**
 * Prompts for radius and height and alerts the cylinder's total surface area.
 * Fixes: (1) reject the input when EITHER value is non-numeric (was &&);
 * (2) the total surface area of a closed cylinder is 2*pi*r^2 + 2*pi*r*h --
 * the original (pi*r^2 + 2*pi*r*h) omitted one of the two circular end caps.
 */
function cylinder() {
    let radius = prompt("Enter the value of radius");
    let height = prompt("Enter the value of height");
    if (radius != "" && height != "") {
        if (isNaN(radius) || isNaN(height)) {
            alert("Values should be numbers");
        } else {
            let area = (2 * Math.PI * Math.pow(radius, 2)) + (2 * (Math.PI * radius * height));
            area = Math.round(area);
            alert("Area of the cylinder is " + area + " square units.");
        }
    } else {
        alert("Provide both values.")
    }
}
#!/bin/bash
# Run the whole test suite with the race detector and write an atomic-mode
# coverage profile to coverage.txt.
set -e
# NOTE(review): a single `go test -coverprofile` run rewrites coverage.txt
# (including its own mode header), so this seed line is overwritten -- it
# only matters if per-package profiles are ever appended instead.
echo 'mode: atomic' > coverage.txt
go test -covermode=atomic -coverprofile=coverage.txt -v -race ./cmd/... ./internal/... ./pkg/...
|
# Framework dependencies: ssh transport and unique-id helper.
REQUIRE via__ssh
REQUIRE misc__newuid

# Short alias for the namespaced uid generator.
newuid() { misc__newuid "$@"; }

# Verifies ssh connectivity to target $1 by echoing a fresh uid through the
# remote shell and checking that it round-trips unchanged. Logs ok/KO to
# stderr and to $BASEDIR/run/<target>/log; returns 1 on failure.
cible__remote__conncheck() {
    local target="$1";shift
    [ -d "$BASEDIR/run/$target" ] || mkdir -- "$BASEDIR/run/$target"
    # A random uid guards against cached/garbled output faking success.
    local uid="$(newuid)"
    if [ "$(
        via__ssh "$target" -n ${CIBLE_SSH_OPTIONS} "echo $uid"
    )" = "$uid" ]; then
        echo >&2 "ok: sshcheck $target"
        echo "ok: sshcheck $target" >> "$BASEDIR/run/$target/log"
    else
        echo >&2 "KO: sshcheck $target"
        echo "KO: sshcheck $target" >> "$BASEDIR/run/$target/log"
        return 1
    fi
}
|
package com.supanadit.restsuite.panel.rest.response;
import javax.swing.*;
/**
 * Tabbed pane for REST call results; currently holds a single
 * "Response" tab backed by a {@link ResponseBodyPanel}.
 */
public class ResponseTabPanel extends JTabbedPane {
    protected ResponseBodyPanel responseBodyPanel;

    public ResponseTabPanel() {
        // false flag passed to ResponseBodyPanel -- presumably "not editable";
        // TODO confirm against ResponseBodyPanel's constructor.
        responseBodyPanel = new ResponseBodyPanel(false);
        add("Response", responseBodyPanel);
    }

    /** @return the body panel shown in the "Response" tab. */
    public ResponseBodyPanel body() {
        return responseBodyPanel;
    }
}
|
#!/bin/bash
# ============LICENSE_START=======================================================
# Copyright (c) 2018-2020 AT&T Intellectual Property. All rights reserved.
# Copyright (d) 2020-2021 J. F. Lucas. All rights reserved.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============LICENSE_END=========================================================
# Pull plugin wagon files and type files from repo
# $1 is the DCAE repo URL - script assumes all files come from the
# same repo, but not necessarily same paths
#
set -x -e

# Location in CM container where plugins/type files will be stored
# At deployment, CM script will look here to find plugins to upload
DEST=${DEST:-/opt/plugins}

# Each line has a plugin wagon/type file pair, in the form
# /path/to/plugin/wagon|/path/to/type/file
PLUGINS=\
"\
/dcaepolicyplugin/2.4.0/dcaepolicyplugin-2.4.0-py36-none-linux_x86_64.wgn|/dcaepolicyplugin/2.4.0/dcaepolicyplugin_types.yaml \
/relationshipplugin/1.1.0/relationshipplugin-1.1.0-py36-none-linux_x86_64.wgn|/relationshipplugin/1.1.0/relationshipplugin_types.yaml \
/k8splugin/3.9.0/k8splugin-3.9.0-py36-none-linux_x86_64.wgn|/k8splugin/3.9.0/k8splugin_types.yaml \
/clamppolicyplugin/1.1.1/clamppolicyplugin-1.1.1-py36-none-linux_x86_64.wgn|/clamppolicyplugin/1.1.1/clamppolicyplugin_types.yaml \
/dmaap/1.5.1/dmaap-1.5.1-py36-none-linux_x86_64.wgn|/dmaap/1.5.1/dmaap_types.yaml \
/pgaas/1.3.0/pgaas-1.3.0-py36-none-linux_x86_64.wgn|/pgaas/1.3.0/pgaas_types.yaml \
/sshkeyshare/1.2.0/sshkeyshare-1.2.0-py36-none-linux_x86_64.wgn|/sshkeyshare/1.2.0/sshkeyshare_types.yaml
"

mkdir -p ${DEST}

for p in ${PLUGINS}
do
    # Split the pair on '|': w = wagon path, t = type-file path.
    w=$(echo $p | cut -d '|' -f1)
    t=$(echo $p | cut -d '|' -f2)
    # Put each wagon/type file pair into its own subdirectory
    # This prevents name collisions which can happen because
    # type files don't embed a version.
    subdir=$(mktemp -d -t plugin-XXXXXXX --tmpdir=${DEST})
    # -f: fail on HTTP errors so `set -e` aborts the whole script.
    curl -Ss -L -f $1/$t >> ${subdir}/$(basename $t)
    curl -Ss -L -f $1/$w >> ${subdir}/$(basename $w)
done

# Cloudify Manager's cfyuser must own the plugin tree.
chown -R cfyuser:cfyuser ${DEST}
|
<reponame>consento-org/hlc<filename>faq/non-unique.js
// Demo setup: a frozen, manually-advanced wallTime (BigInt) shared by
// three HLC nodes whose clocks are offset from one another.
// (util is required but not used in this excerpt -- possibly used below.)
const util = require('util')
const HLC = require('..')

let now = 1n
const wallTime = () => now

// node1 is the baseline; node2 runs 1n ahead, node3 runs 5n ahead.
// Each node also has a mutable extra offset for later experiments.
const node1Offset = 0n
const node1 = new HLC({ wallTime: () => wallTime() + 0n + node1Offset })
let node2Offset = 0n
const node2 = new HLC({ wallTime: () => wallTime() + 1n + node2Offset })
let node3Offset = 0n
const node3 = new HLC({ wallTime: () => wallTime() + 5n + node3Offset })
// Exchange current timestamps between all three nodes: capture each node's
// now() first, then feed every other node's stamp into each clock (same
// pairwise order as a fully unrolled sync).
function syncAllNodes () {
  const clocks = [node1, node2, node3]
  const stamps = clocks.map(clock => clock.now())
  clocks.forEach((clock, i) => {
    stamps.forEach((stamp, j) => {
      if (i !== j) clock.update(stamp)
    })
  })
  return 'sync all nodes'
}
function advanceTime (amount) {
  // Move the simulated wall clock forward by `amount` (a BigInt) and
  // return a markdown snippet describing the step.
  now = now + amount
  const description = `advance time by \`${amount}n\` (wallTime=\`${now}\`)`
  return description
}
// Render and print the article. NOTE: the interpolations below are
// evaluated in document order and several of them have side effects
// (advanceTime/syncAllNodes mutate the simulated clocks, and the
// `+=` expressions adjust node offsets), so the order of the template
// expressions is load-bearing — do not reorder or deduplicate them.
console.log(render`# How HLC's are not unique?
Looking into statements about HLC's we find:
> Most of the time they store a node ID too as a tie breaker in case of identical timestamps and counters. ([source](https://imfeld.dev/notes/hybrid_logical_clock))
> However it doesn't guarantee that provided timestamps will be unique across services ... sometimes come together with some unique and ordered process identifier ([source](https://bartoszsypytkowski.com/hybrid-logical-clocks/))
> In practice, you'll want a third element, a "node ID" that is unique per device. ([source](https://jaredforsyth.com/posts/hybrid-logical-clocks/))
... unfortunately none of these elaborate why a "node ID" is a good idea. This article is an attempt to mitigate this.
Let's say we have three HLC nodes with an artificial \`wallTime\` that is frozen in time, currently at ${now}.
_Node2_ is \`1n\` ahead of _node1_ and _node3_ is another \`4n\` ahead of node1. In a real system the difference between the nodes
may be much larger. Also of note is that the nodes are progressing in parallel, meaning that one node likely stays ahead of the others.
- node1: ${node1.now()}
- node2: ${node2.now()}
- node3: ${node3.now()}
Without syncing or progresses in the \`wallTime\`, naturally _node1_ and _node2_ produce the same, non-unique timestamps
consistently.
- node1: ${node1.now()}
- node2: ${node2.now()}
- node3: ${node3.now()}
As you can see all of the node's logical counters increased even though the \`wallTime\` didn't making the timestamps unique
only per node. In practice, time advances which means that two timestamps would need to be created within \`< 1 nanosecond\`
of time, which is very unlikely. Let's ${advanceTime(1n)} and we shall see that the logical clock is reset.
- node1: ${node1.now()}
- node2: ${node2.now()}
- node3: ${node3.now()}
The strength of HLC's is revealed when a sync happens. If we ${syncAllNodes()}, we will notice that
all timestamps are beyond the \`wallTime\`=${now} and also beyond the previous largest timestamp of _node3_.
- node1: ${node1.now()}
- node2: ${node2.now()}
- node3: ${node3.now()}
If we further ${advanceTime(1n)}, it will not make a difference for _node1_ and _node2_. Their \`wallTime\` is still
behind the last \`wallTime\` they used, but their logical component will increase.
- node1: ${node1.now()}
- node2: ${node2.now()}
- node3: ${node3.now()}
Just to illustrate this point, we can ${advanceTime(1n)} once more and the \`wallTime\` will still not have increased.
- node1: ${node1.now()}
- node2: ${node2.now()}
- node3: ${node3.now()}
Only if we ${advanceTime(4n)} to a point where it caught up with the \`5n\` advance of _node3_ during the last sync,
the other nodes use the \`wallTime\` again.
- node1: ${node1.now()}
- node2: ${node2.now()}
- node3: ${node3.now()}
Unlike the case of multiple operations per nanosecond, this case is actually rather common. Clocks are never
100% in sync and after a sync process it is very likely that all but the most advanced clock will use the
logical component.
The clocks using the logical component will increment independently. This will create equivalent,
non-unique timestamps in the process.
## Bonus Question: Why don't we sync the wallTime?
There is a reasonably argument to be made that the \`wallTime\` should be increased for the nodes lagging behind.
You can add an offset to the largest \`wallTime\` that you received.
It is a good idea to do that, particularly to reduce the chance of duplicates and it may make a little more sense
for users. However, you need to be careful when offsets to the \`wallTime\` don't compound.
In the previous examples, the time for all nodes advanced at the same pace. But in reality it does not. Each
node increments at slightly different paces. Let's say we adjust _node2's? wallTime to add \`4n\` => ${1n + (node2Offset += 4n)}
offset to its \`wallClock\`. When we next ${advanceTime(1n)} we will see that _node2_ and _node3_ both advanced.
- node2: ${node2.now()}
- node3: ${node3.now()}
What could happen though is that _node2_ advances sometimes at a pace of ${node2Offset += 2n} while
_node3_ advances sometimes advances at the pace of ${node3Offset += 1n}.
- node2: ${node2.now()}
- node3: ${node3.now()}
Suddenly _node2_ is ahead and _node3_ needs to compensate by increasing its offset by \`+1\`, even though it is
physically the still \`3n\` ahead of _node2_! From that point on every unevenness in advance causes
all nodes to adjust for the biggest entry. This will lead to clocks automaticaly drifting little by little into the future.
Due to this difficulty, using the \`wallTime\` synching is not _(yet?)_ generalized and you need to be careful to make
sure that you don't exceed the time of the actually "furthest ahead" node.
### Important to note
Synching \`wallTime\` will reduce the probability for a non-unique timestamp drastically, but even so
it is still possible to have non-unique timestamps: Two nodes that happen to have the same \`wallTime\` and \`logical time\`.
The probability depends on the numbers of nodes, the amount of average drift and number of timestamps created.
Unless you feel confident that your probabilities are in your favor, it is a good idea to assume timestamps as non-unique.
`)
function render (strings, ...keys) {
const result = [strings[0]]
keys.forEach(function (key, i) {
if (typeof key !== 'string') {
key = '`' + util.inspect(key) + '`'
}
result.push(key, strings[i + 1])
})
return result.join('')
}
|
#!/bin/bash -f
#*********************************************************************************************************
# Vivado (TM) v2018.3 (64-bit)
#
# Filename : CNN_top_module.sh
# Simulator : Mentor Graphics ModelSim Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
# The script will automatically create the design libraries sub-directories in the run
# directory, add the library logical mappings in the simulator setup file, create default
# 'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Mon May 06 21:20:21 +0700 2019
# SW Build 2405991 on Thu Dec 6 23:38:27 MST 2018
#
# Copyright 1986-2018 Xilinx, Inc. All Rights Reserved.
#
# usage: CNN_top_module.sh [-help]
# usage: CNN_top_module.sh [-lib_map_path]
# usage: CNN_top_module.sh [-noclean_files]
# usage: CNN_top_module.sh [-reset_run]
#
# Prerequisite:- To compile and run simulation, you must compile the Xilinx simulation libraries using the
# 'compile_simlib' TCL command. For more information about this command, run 'compile_simlib -help' in the
# Vivado Tcl Shell. Once the libraries have been compiled successfully, specify the -lib_map_path switch
# that points to these libraries and rerun export_simulation. For more information about this switch please
# type 'export_simulation -help' in the Tcl shell.
#
# You can also point to the simulation libraries by either replacing the <SPECIFY_COMPILED_LIB_PATH> in this
# script with the compiled library directory path or specify this path with the '-lib_map_path' switch when
# executing this script. Please type 'CNN_top_module.sh -help' for more information.
#
# Additional references - 'Xilinx Vivado Design Suite User Guide:Logic simulation (UG900)'
#
#*********************************************************************************************************
# Script info
echo -e "CNN_top_module.sh - Script generated by export_simulation (Vivado v2018.3 (64-bit)-id)\n"
# Main steps
# Top-level driver: validate arguments, prepare the simulator setup
# files, then compile the design and launch the simulation.
run()
{
  check_args $# $1
  setup $1 $2
  compile
  simulate
}
# RUN_STEP: <compile>
compile()
{
  # Compile design files, mirroring all output into compile.log while
  # still showing it on the console.
  source compile.do 2>&1 | tee -a compile.log
}
# RUN_STEP: <simulate>
simulate()
{
  # Run ModelSim in 64-bit batch (console) mode driven by simulate.do,
  # logging to simulate.log.
  vsim -64 -c -do "do {simulate.do}" -l simulate.log
}
# STEP: setup
# Dispatch on the first command-line option: copy the ModelSim setup
# file, reset the run area, or keep previous data untouched. Always
# (re)creates the design library directory afterwards.
setup()
{
  case $1 in
    "-lib_map_path" )
      # -lib_map_path requires a directory argument in $2.
      if [[ ($2 == "") ]]; then
        echo -e "ERROR: Simulation library directory path not specified (type \"./CNN_top_module.sh -help\" for more information)\n"
        exit 1
      fi
      copy_setup_file $2
    ;;
    "-reset_run" )
      reset_run
      echo -e "INFO: Simulation run files deleted.\n"
      exit 0
    ;;
    "-noclean_files" )
      # do not remove previous data
    ;;
    * )
      # Default: copy modelsim.ini from $2 (or the baked-in path).
      copy_setup_file $2
  esac
  create_lib_dir
  # Add any setup/initialization commands here:-
  # <user specific commands>
}
# Copy modelsim.ini file
# Copies the simulator setup file (modelsim.ini) from the compiled
# simulation library directory into the current run directory.
#   $1 - optional path to the compiled simulation libraries; when empty
#        the path baked in by Vivado at export time is used.
copy_setup_file()
{
  file="modelsim.ini"
  if [[ ($1 != "") ]]; then
    lib_map_path="$1"
  else
    # Default recorded at export time; override with -lib_map_path.
    lib_map_path="B:/Dokumen/CNN_HW/CNN_HW.cache/compile_simlib/modelsim"
  fi
  if [[ ($lib_map_path != "") ]]; then
    src_file="$lib_map_path/$file"
    # Quote the source path: library paths (especially Windows-style
    # ones like the default above) may contain spaces, and the
    # previous unquoted 'cp $src_file .' would then fail or copy the
    # wrong files.
    cp "$src_file" .
  fi
}
# Create design library directory
create_lib_dir()
{
  # Recreate the simulation library directory from scratch; rm -rf on a
  # non-existent directory is a silent no-op, so no existence check is
  # needed before removal.
  lib_dir="modelsim_lib"
  rm -rf "$lib_dir"
  mkdir "$lib_dir"
}
# Delete generated data from the previous run
reset_run()
{
  # Remove logs, the waveform database and the simulation library
  # directory, then recreate an empty library directory.
  files_to_remove=(compile.log elaborate.log simulate.log vsim.wlf modelsim_lib)
  for file in "${files_to_remove[@]}"; do
    if [[ -e $file ]]; then
      rm -rf $file
    fi
  done
  create_lib_dir
}
# Check command line arguments
#   $1 - number of arguments the script received
#   $2 - first argument
# Rejects unknown single options; prints usage for -help/-h.
check_args()
{
  if [[ ($1 == 1 ) && ($2 != "-lib_map_path" && $2 != "-noclean_files" && $2 != "-reset_run" && $2 != "-help" && $2 != "-h") ]]; then
    echo -e "ERROR: Unknown option specified '$2' (type \"./CNN_top_module.sh -help\" for more information)\n"
    exit 1
  fi
  if [[ ($2 == "-help" || $2 == "-h") ]]; then
    usage
  fi
}
# Script usage
# Prints the help text and exits.
# NOTE(review): always exits with status 1, even when reached via
# -help — confirm whether exit 0 was intended for the help path.
usage()
{
  msg="Usage: CNN_top_module.sh [-help]\n\
Usage: CNN_top_module.sh [-lib_map_path]\n\
Usage: CNN_top_module.sh [-reset_run]\n\
Usage: CNN_top_module.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
  echo -e $msg
  exit 1
}
# Launch script
run $1 $2
|
import Swiper, {Navigation, Pagination, Keyboard} from 'swiper';
// Register the Swiper modules used by the sliders created below.
Swiper.use([Navigation, Pagination, Keyboard]);
window.sliders = function() {
  // Initialise a Swiper instance for every element carrying a
  // `data-slider` attribute (whose value is the JSON options object),
  // keyed by element id, and keep each slider correctly sized through
  // a ResizeObserver on its element.
  const sliderElements = document.querySelectorAll('[data-slider]');
  const sliders = {};
  if (!sliderElements.length) {
    return;
  }
  sliderElements.forEach(function (element) {
    const options = JSON.parse(element.dataset.slider);
    sliders[element.getAttribute('id')] = new Swiper(element, options);
    const observer = new ResizeObserver(function (entries) {
      for (const entry of entries) {
        sliders[element.getAttribute('id')].updateSize();
      }
    });
    observer.observe(element);
  });
};
import Ember from 'ember';
function animationendHandler(event){
if(event.animationName==='getsmaller'){
this.get('close')();
}
}
// Growl-style notification item. The component closes itself when the
// countdown ("time to die") bar's shrink animation finishes, or when the
// close action is triggered explicitly.
export default Ember.Component.extend({
  //tagName:'',
  // Lifetime of the notification in ms; presumably drives the CSS
  // countdown animation duration in the template — confirm.
  lifeTime: 10000,
  didRender(){
    // Attach the animationend listener to the countdown bar so the
    // notification closes itself once the 'getsmaller' animation ends.
    // NOTE(review): a new bound listener is added on every render —
    // confirm duplicate listeners are acceptable here.
    let animator = this.element.querySelector(".ember-growl-notification-item-box-time-to-die");
    animator.addEventListener("animationend", animationendHandler.bind(this), false);
  },
  actions:{
    close:function(){
      // Forward to the close action passed in by the parent.
      this.get('close')();
    }
  }
});
|
package com.enonic.app.gitpull;
import java.util.Map;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import com.enonic.app.gitpull.connection.GitConnection;
/**
 * OSGi service that pulls every configured git repository.
 * Configuration is supplied through the "com.enonic.app.gitpull"
 * configuration PID; all repositories are pulled once on activation.
 */
@Component(immediate = true, configurationPid = "com.enonic.app.gitpull")
public final class GitPullServiceImpl
    implements GitPullService
{
    // Parsed connection configuration, built from the OSGi config map.
    private GitConnectionConfig config;

    // Performs the actual pull for a single connection; package-private,
    // presumably so tests can substitute a stub — confirm.
    GitRepoPuller repoPuller;

    public GitPullServiceImpl()
    {
        this.repoPuller = new GitRepoPullerImpl();
    }

    /**
     * OSGi activation hook: parses the configuration map and immediately
     * pulls every configured repository.
     *
     * @param config raw configuration properties from the admin service
     */
    @Activate
    public void activate( final Map<String, String> config )
    {
        this.config = GitConnectionConfig.create( config );
        pullAll();
    }

    /**
     * Pulls every repository described by the current configuration.
     */
    @Override
    public void pullAll()
    {
        this.config.toMap().values().forEach( this::pull );
    }

    // Pull a single configured connection.
    private void pull( final GitConnection connection )
    {
        this.repoPuller.pull( connection );
    }
}
|
<filename>app/src/main/java/com/example/zhongweikang/beijingnew/User_define/myRefreshListView.java
package com.example.zhongweikang.beijingnew.User_define;
import android.content.ContentUris;
import android.content.Context;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.view.animation.Animation;
import android.view.animation.RotateAnimation;
import android.widget.AbsListView;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.example.zhongweikang.beijingnew.R;
import java.text.SimpleDateFormat;
import java.util.Date;
import static android.R.attr.paddingTop;
/**
* Created by zhongweikang on 2017/2/24.
*/
public class myRefreshListView extends ListView {
private int DownX, DownY = -1;
private LinearLayout Headerview;
private int headTop;
private View pullDownHeadView;
private View secondHeaderView;
private int ListViewOnScreenY = -1;
private final int refersh = 0; // 下拉刷新
private final int release_refesh = 1; // 释放刷新
private final int refershing = 2; // 正在刷新
private int currentState = 0; // 当前刷新状态
private TextView time;
private TextView tittle;
private ImageView image1;
private ProgressBar progressBar;
private RotateAnimation UpAnmia,downAnmia;
private OnRefershListener mOnRefreshListener; // 回调的事件的对象
private View footView;
private int footerViewHeight;
private boolean isLoadingMore=false; //
public myRefreshListView(Context context) {
super(context);
addRfeshListViewHead();
initFooterView();
}
public myRefreshListView(Context context, AttributeSet attrs) {
super(context, attrs);
addRfeshListViewHead();
initFooterView();
}
public myRefreshListView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
addRfeshListViewHead();
initFooterView();
}
/* 添加头布局*/
public void addRfeshListViewHead() {
Headerview = (LinearLayout) inflate(getContext(), R.layout.add_refreshlist_head, null);
pullDownHeadView = Headerview.findViewById(R.id.head_lin); // 头部对象
time = (TextView) Headerview.findViewById(R.id.head_time);
time.setText("最后的刷新时间"+getCurrentTime());
tittle = (TextView) Headerview.findViewById(R.id.head_tittle);
image1 = (ImageView) Headerview.findViewById(R.id.head_fr);
progressBar = (ProgressBar) Headerview.findViewById(R.id.head_pro);
/* 侧量头部的长度*/
pullDownHeadView.measure(0, 0);
headTop = pullDownHeadView.getMeasuredHeight(); // 侧量头部的高度
pullDownHeadView.setPadding(0, -headTop, 0, 0); // 隐藏头布局
myRefreshListView.this.addHeaderView(Headerview);
initAnimation(); // 初始化头布局的动画
}
/* 设置动画的方法*/
private void initAnimation() {
// 向上的动画
UpAnmia=new RotateAnimation(0,-180, Animation.RELATIVE_TO_SELF,0.5f,Animation.RELATIVE_TO_SELF,0.5f);
UpAnmia.setFillAfter(true);
UpAnmia.setDuration(1000);
// 向下的动画
downAnmia=new RotateAnimation(-180,-360, Animation.RELATIVE_TO_SELF,0.5f,Animation.RELATIVE_TO_SELF,0.5f);
UpAnmia.setFillAfter(true);
UpAnmia.setDuration(1000);
}
@Override
public boolean onTouchEvent(MotionEvent ev) {
switch (ev.getAction()) {
case MotionEvent.ACTION_DOWN:
DownY = (int) ev.getY();
break;
case MotionEvent.ACTION_MOVE:
if (DownY == -1) {
DownY = (int) ev.getY();
}
int moveY = (int) ev.getY();
int diffY = DownY - moveY; // 向下滑动为负数
// 判定当前是否正在刷新中,如果
if (currentState==refershing){
// 当前正在刷新中,不执行下拉,直接跳出
break;
}
// 判断轮播图是否完全显示了
boolean isDisplay = isDisplaySecondHeaderView();
if (!isDisplay) {
// 没有完全显示,不执行下来刷新头操作,直接跳出Switch
break;
// 完全显示,则走下面的方法
}
if (diffY < 0) {
int paddingTop = -headTop - diffY; // 距离上部的高度
if (paddingTop > 0 && currentState != release_refesh) { // 完全显示并且当前的状态不是释放刷新的状态的时候,进入松开刷新状态
currentState = release_refesh; // 当先的状态改变成释放刷新
refreshHeadViewState();
} else if (paddingTop < 0 && currentState != refersh) { // 没有完全实现,并且当前的状态是不在刷新的时候,不会进入松开刷新状态
currentState = refersh; // 当前的状态改变为要刷新
refreshHeadViewState();
}
pullDownHeadView.setPadding(0, paddingTop, 0, 0);
return true;
}
break;
case MotionEvent.ACTION_UP:
DownY = -1;
if (currentState==refersh){ // 下拉刷新,,将头布局隐藏
pullDownHeadView.setPadding(0,-headTop,0,0);
}
else if (currentState==release_refesh){
// 进入到正在刷新的状态
currentState=refershing;
refreshHeadViewState();
pullDownHeadView.setPadding(0,0,0,0);
// 调用用户的回调事件,用于刷新数据
if (mOnRefreshListener!=null){
mOnRefreshListener.onPullDownRefresh();
}
}
break;
}
return super.onTouchEvent(ev);
}
/* 根据当前头布局的状态,改变头布局*/
private void refreshHeadViewState() {
switch (currentState) {
case refersh: // 下拉刷新
image1.startAnimation(downAnmia);
tittle.setText("下拉刷新");
break;
case release_refesh: // 释放刷新
image1.startAnimation(UpAnmia);
tittle.setText("松开刷新");
break;
case refershing: // 正在刷新中
image1.clearAnimation();
image1.setVisibility(GONE);
progressBar.setVisibility(VISIBLE);
tittle.setText("正在刷新中");
break;
}
}
/* 添加第二个头布局*/
public void addSecondHeaderView(View SecondHeaderView) {
this.secondHeaderView = SecondHeaderView;
Headerview.addView(SecondHeaderView); // 将传过来的额
}
/* 判断轮播图是否完全显示*/
public boolean isDisplaySecondHeaderView() {
// 获取ListView在屏幕中y轴的值
int[] loactionXY = new int[2]; // 0 位 x值,1,为Y的值
if (ListViewOnScreenY == -1) {
this.getLocationOnScreen(loactionXY);
ListViewOnScreenY = loactionXY[1]; // 取出Y值
}
// 获取轮播图在屏幕中的Y轴的值
secondHeaderView.getLocationOnScreen(loactionXY);
int mSecondHeaderViewOnScreen = loactionXY[1];
if (ListViewOnScreenY <= mSecondHeaderViewOnScreen) { // 完全显示的时候
return true;
} else {
return false;
}
}
/* 回调 ,当前的ListView刷新的回调接口*/
public interface OnRefershListener{
/*
* 当下拉刷新时触发此方法
* */
void onPullDownRefresh();
/*
* 加载更多的方法*/
void onLoadingMore();
}
public void SetOnRefreshListener(OnRefershListener listener){
this.mOnRefreshListener=listener;
}
/* 刷新完数据后,将头布局隐藏掉或者将脚布局隐藏掉*/
public void onRefreshFinish( boolean isSuccess){
// 判定是加载更多或者下拉刷新
if (isLoadingMore){ // 加载更多中为真,就隐藏掉
footView.setPadding(0,-footerViewHeight,0,0);
isLoadingMore=false;
}else { // 为假就是,就是设置头布局的设置
pullDownHeadView.setPadding(0,-headTop,0,0);
currentState=refersh;
progressBar.setVisibility(GONE);
image1.setVisibility(VISIBLE);
tittle.setText("下拉刷新");
if (isSuccess){
time.setText("最后刷新时间"+getCurrentTime());
}
}
}
public String getCurrentTime(){
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
Log.d("sdf",sdf.format(new Date()));
return sdf.format(new Date());
}
/* 添加脚布局*/
private void initFooterView(){
footView=inflate(getContext(),R.layout.add_refreshlist_foot,null);
footView.measure(0,0);
footerViewHeight=footView.getMeasuredHeight();
footView.setPadding(0,-footerViewHeight,0,0);
this.addFooterView(footView);
/*为这个自定义的listView设置滚动事件*/
this.setOnScrollListener(new OnScrollListener() {
/* 滚动时间改变的时候的,第一个参数是listView这个事件,scrollState是状态*/
@Override
public void onScrollStateChanged(AbsListView view, int scrollState) {
//当滚动停止时,或者惯性滑动时,ListView最后一个显示的太欧姆索引为getCout-1时
if(scrollState==SCROLL_STATE_IDLE||scrollState==SCROLL_STATE_FLING){
if (getLastVisiblePosition()==getCount()-1&& !isLoadingMore){ // 滑动到最后Item,只有没有加载的时候才可以触发继续加载
Log.d("tag","滚动到底部了");
isLoadingMore=true; // 标志,为true时,在进就进不来了
footView.setPadding(0,0,0,0); // 显示脚步
setSelection(getCount());
/* 调用使用者的回调事件*/
if (mOnRefreshListener!=null){
mOnRefreshListener.onLoadingMore();
}
}
}
}
@Override
public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) {
}
});
}
}
|
// Cache a lowercase user-agent string for the Firefox workaround below.
let agent = navigator.userAgent.toLowerCase();
$( function() {
  // Every to-do card is draggable: a clone follows the cursor and the
  // card snaps back when dropped outside a valid target.
  $( ".Thisdraggable" ).draggable({
    zIndex: 10,
    helper : "clone",
    revert: 'invalid',
  });
  // Kanban columns accept dropped cards and reassign the task.
  $( ".thisdroppable" ).droppable({
    drop: function( event, dragui ) {
      let targetId = $(this).attr('id'); //userid
      let originId = dragui.draggable.closest("main").attr("id");
      if ( originId === undefined ) originId = 'undefined';
      // Card element ids look like "workid_<idx>"; extract the index.
      let tmptargetworkidx = dragui.draggable.attr("id").split("_");
      let targetworkidx = tmptargetworkidx[1];
      // Dropping an unassigned card back onto the standby area is a no-op.
      if(originId == "undefined" && targetId =='ToDoStandbyArea'){
        //$(this).draggable("destroy");
        return false;
      }
      /*ui.draggable.removeClass("ui-draggable");
      ui.draggable.removeClass("ui-draggable-handle");
      ui.draggable.css("inset",'');
      ui.draggable.css("position",'');
      ui.draggable.css("z-index",'100');
      ui.draggable.detach().appendTo($(this));*/
      //ui.draggable.removeAttr("style");
      //ui.draggable.removeClass("ui-draggable");
      //ui.draggable.removeClass("ui-draggable-handle");
      dragui.draggable.css("inset",'');
      // Firefox needs position:relative for the card to render in place.
      if (agent.indexOf("firefox") != -1) {
        dragui.draggable.css("position",'relative');
      }else{
        dragui.draggable.css("position",'');
      }
      dragui.draggable.css("width","100%");
      dragui.draggable.css("z-index",100);
      // Physically move the card element into the target column.
      dragui.draggable.detach().appendTo($(this));
      //ui-draggable ui-draggable-handle
      if ( targetId == 'ToDoStandbyArea') { // task reset (unassign)
        //if ( targetId !== originId ) {
        fn_todoset(targetworkidx,'CLEAR',originId);
        //}
      }else{ // task assignment
        if ( targetId !== originId ) {
          fn_todoset(targetworkidx,targetId,originId);
        }
      }
    }
  });
} );
function removeitem(targetIdx) {
  // Remove the kanban card element for the given work index.
  $(`#workid_${targetIdx}`).remove();
  return false;
}
function ajax_statics_update( ) {
  // Intentionally empty: placeholder hook for refreshing statistics
  // via AJAX (not yet implemented).
}
// Assign, reassign or clear a task via a synchronous AJAX call, then
// broadcast a notification message to the team.
//   targetworkidx - work index of the dragged card
//   targetId      - target user id, or 'CLEAR' to unassign the task
//   OriginToDoID  - previous owner id, or 'undefined' for a new assignment
function fn_todoset(targetworkidx,targetId,OriginToDoID){
  let TargetProjectName = $("#workid_" + targetworkidx).find(".TargetProjectName").text();
  TargetProjectName = TargetProjectName.replace("☞ 인트라넷 바로가기","");
  jQuery.ajax({
    type: "POST",
    dataType: "json",
    url: "/manager/project/settodo",
    data: "ProjectWorkIdx=" + targetworkidx + "&ToDoID=" + targetId +"&OriginToDoID="+ OriginToDoID,
    async: false,
    beforeSend: function () {
      // Show the loading overlay while the request runs.
      $('.wrap-loading').removeClass('display_none');
    },
    success: function (json) {
      if (json.result === false) {
        alert('처리중 오류가 발생하였습니다');
        return false;
      } else {
        // Broadcast a notification to the relevant team page.
        let newArray = [];
        let makedata = {};
        let messenger_user = "";
        // Build the notification text for each assignment case:
        // cancelled / newly assigned / reassigned.
        if ( targetId == "CLEAR" ) {
          //messenger_user = json.ToDoUser+"의 업무가 취소";
          messenger_user = "[알림]업무취소 담당자:" + json.ToDoUser + ",업무명:" + TargetProjectName;
        }else if ( OriginToDoID == "undefined" && targetId > 0 ) {
          //messenger_user = json.ToDoUser+"의 신규업무할당";
          messenger_user = "[알림]신규업무 담당자:" + json.ToDoUser + ",업무명:" + TargetProjectName;
        }else{
          //messenger_user = json.OldToDoUser+"의 업무가 "+ json.ToDoUser +"으로 변경";
          messenger_user = "[알림]담당변경 :" + json.OldToDoUser + " ▶ " + json.ToDoUser + ",업무명:" + TargetProjectName;
        }
        makedata.cal_UID = $("#UID").val();
        makedata.cal_title = "프로젝트 업무 할당";
        makedata.cal_start = null;//res.data.TmpSdate;
        makedata.cal_end = null;//res.data.TmpSdate;
        makedata.cal_id = targetworkidx;
        makedata.cal_todouser = messenger_user;
        newArray.push(makedata);
        ui.display.addIndivisualTodo(newArray);
        // Push the message into the chat window.
        ui.message.notifysend(messenger_user);
        $('.wrap-loading').addClass('display_none');
        return false;
      }
    },
    complete: function () {
      // Always hide the loading overlay, even on error.
      $('.wrap-loading').addClass('display_none');
    }
  });
}
$(document).ready(function(){
  // Hide the slider arrows when all kanban columns (300px each) already
  // fit inside the project area.
  let LoadAreaWidth = parseInt($("#TeamProjectArea").width());
  const LoadMemberCount = $(".kanban-container").length;
  if (LoadAreaWidth > LoadMemberCount*300 ) {
    $(".thisSliderBtn").addClass("display_none");
  }
  // Debounce flags so keyboard and button handlers cannot double-fire;
  // both flags are reset 500ms after a slide action completes.
  var doubleSubmitFlag = false;
  function doubleSubmitCheck(){
    if(doubleSubmitFlag){
      return doubleSubmitFlag;
    }else{
      doubleSubmitFlag = true;
      return false;
    }
  }
  var doubleSubmitFlag2 = false;
  function doubleSubmitCheck2(){
    if(doubleSubmitFlag2){
      return doubleSubmitFlag2;
    }else{
      doubleSubmitFlag2 = true;
      return false;
    }
  }
  // Left/right arrow keys page the kanban slider via the buttons.
  $(document).keyup(function(event) {
    if(doubleSubmitCheck()) return;
    if (event.keyCode == '37') {
      $("#prevBtn").trigger("click");
    }
    else if (event.keyCode == '39') {
      $("#nextBtn").trigger("click");
    }
    return false;
  });
function tourLandingScript() {
let TargetAreaWidth = parseInt($("#TeamProjectArea" ).width());
if (TargetAreaWidth > LoadMemberCount*300 ) {
$(".thisSliderBtn").addClass("display_none");
}else{
$(".thisSliderBtn").removeClass("display_none");
}
let viewCount = parseInt(TargetAreaWidth/300);
KanbanSlider.reloadSlider({
autoControls: true,
speed: 500,
slideSelector: 'li',
minSlides: 1,
maxSlides: viewCount,
moveSlides: 1,
slideWidth: 300,
slideMargin: 5,
pager:false,
controls:false,
infiniteLoop:false,
touchEnabled:false,
oneToOneTouch: false,
onSliderLoad: function () {
$(".bx-viewport").css("overflow","");
},
});
}
$(window).resize(function(){
tourLandingScript();
});
let TargetAreaWidth = parseInt($("#TeamProjectArea" ).width());
let viewCount = parseInt(TargetAreaWidth/300);
let KanbanSlider = $('#teamKanban').bxSlider({
autoControls: true,
speed: 500,
slideSelector: 'li',
minSlides: 1,
maxSlides: viewCount,
moveSlides: 3,
slideWidth: 300,
slideMargin: 5,
pager:false,
controls:false,
infiniteLoop:false,
touchEnabled:false,
oneToOneTouch: false,
onSliderLoad: function () {
$(".bx-viewport").css("overflow","");
},
});
$("#prevBtn").click(function(e){
if(doubleSubmitCheck2()) return;
KanbanSlider.goToPrevSlide();
setTimeout(function() {
doubleSubmitFlag = false;
doubleSubmitFlag2 = false;
}, 500);
e.preventDefault();
return false;
});
$("#nextBtn").click(function(e){
if(doubleSubmitCheck2()) return;
KanbanSlider.goToNextSlide();
setTimeout(function() {
doubleSubmitFlag = false;
doubleSubmitFlag2 = false;
}, 500);
e.preventDefault();
return false;
});
$(document).on("click", "#btn_add_todo", function (e) {
$('#formreg_todo').show();
e.preventDefault();
return false;
});
$(document).on("click", ".popcls", function (e) {
$('#formreg_todo').hide();
e.preventDefault();
return false;
});
$(document).on("click", ".btn_click_info", function () {
let idx = $(this).data('idx');
$("#popdetail").setLayer({
'url' : '/manager/project/popdetail2/' + idx,
'width' : 1024,
'max_height' : 500
});
});
$(document).on("click", ".intra_page_link", function () {
let go_url = $(this).data('url');
window.open(go_url, '_blank');
return false;
});
$(document).off('change', '#ProjectTeam').on('change', '#ProjectTeam',function() {
let idx = $(this).val();
if ( idx > 0 ) {
location.href='/manager/kanban/' + idx;
}
return false;
});
$('#TodoRegFrom').on("submit", function() {
if ( $('#ProjectIdx').val() == "") {
alert("프로젝트 선택은 필수입니다..");
$('#CompanyRegistrationNo').focus();
return false;
}
if ( $('#title').val() == "") {
alert("업무타이틀을 입력해주세요");
$('#CompanyRegistrationNo').focus();
return false;
}
if ( $('#Foretime').val() < 1) {
alert("예상소요시간을 입력해주세요");
$('#Foretime').focus();
return false;
}
if (!confirm('Todo업무를 등록하시겠습니까?')) return false;
$.ajax({
type: "POST",
url: "/manager/project/addtodo",
data: $('#TodoRegFrom').serialize(),
dataType: "JSON",
async : false,
success: function(res){
if ( res.result === false ) {
alert(res.message);
return false;
}else{
let pname = $("#ProjectIdx option:selected").text();
//리스트에 추가
html = "<div class='kanban-item Thisdraggable' style='cursor: pointer !important;' id='workid_"+res.result_idx+"'>프로젝트 : "+pname+"<br ><span class='project_title_wrap'><i class='fa fa-info-circle noh_cursor btn_click_info' data-idx='"+res.result_idx +"'></i><span class='TargetProjectName'>"+$('#title').val()+"</span></span></div>";
$("#ToDoStandbyArea").append(html);
$( "#workid_"+res.result_idx).draggable({
zIndex: 10,
helper : "clone",
revert: 'invalid',
});
$('#formreg_todo').hide();
}
}
});
return false;
});
$(document).on("click", ".noti_close", function () {
let strNoticeIdx = $(this).data('idx')?$(this).data('idx'):("#NowNoticeIdx").val();
if (!confirm('공지를 내리시겠습니까?')) return false;
jQuery.ajax({
type: "POST",
dataType: "json",
url: "/manager/project/noticedelete",
data: "NoticeIdx=" + strNoticeIdx,
async: false,
beforeSend: function () {
$('.wrap-loading').removeClass('display_none');
},
success: function (json) {
if (json.result === true) {
// 해당 팀페이지에 알림 전송
let newArray = [];
let makedata = {};
makedata.target_idx = strNoticeIdx;
makedata.target_title = $('#TargetNoticeTitle').text();
makedata.target_mode = 'remove';
newArray.push(makedata);
ui.display.fn_updateNotice(newArray);
$(".alarm_noti").addClass("display_none");
$("#NowNoticeIdx").val('');
return false;
} else {
alert(json.message);
return false;
}
},
complete: function () {
$('.wrap-loading').addClass('display_none');
}
});
});
}); |
#!/bin/bash
# Stop the application's Docker containers on the deployment host.
# Abort if the app directory is missing (SC2164): otherwise
# docker-compose would run in whatever directory the script was
# started from and could act on the wrong compose file.
cd /home/ec2-user/app || exit 1
echo "[+] Running down Docker containers"
docker-compose down
|
#!/usr/bin/env bash
# bash-it installer
# Show how to use this installer
function _bash-it_show_usage() {
echo -e "\n$0 : Install bash-it"
echo -e "Usage:\n$0 [arguments] \n"
echo "Arguments:"
echo "--help (-h): Display this help message"
echo "--silent (-s): Install default settings without prompting for input"
echo "--interactive (-i): Interactively choose plugins"
echo "--no-modify-config (-n): Do not modify existing config file"
echo "--append-to-config (-a): Keep existing config file and append bash-it templates at the end"
echo "--overwrite-backup (-f): Overwrite existing backup"
exit 0
}
# enable a thing
function _bash-it_load_one() {
file_type=$1
file_to_enable=$2
mkdir -p "$BASH_IT/${file_type}/enabled"
dest="${BASH_IT}/${file_type}/enabled/${file_to_enable}"
if [ ! -e "${dest}" ]; then
ln -sf "../available/${file_to_enable}" "${dest}"
else
echo "File ${dest} exists, skipping"
fi
}
# Interactively enable several things
# Walks every available component of the given type ($1) and asks the
# user (y/N) whether to enable it via the matching _enable-* function.
function _bash-it_load_some() {
	file_type=$1
	# Derive the singular form used by the _enable-<type> helpers
	# (aliases -> alias, plugins -> plugin).
	single_type=$(echo "$file_type" | sed -e "s/aliases$/alias/g" | sed -e "s/plugins$/plugin/g")
	enable_func="_enable-$single_type"
	[ -d "$BASH_IT/$file_type/enabled" ] || mkdir "$BASH_IT/$file_type/enabled"
	# Skip files starting with "_" (internal/library files).
	for path in "$BASH_IT/${file_type}/available/"[^_]*; do
		file_name=$(basename "$path")
		while true; do
			just_the_name="${file_name%%.*}"
			read -r -e -n 1 -p "Would you like to enable the $just_the_name $file_type? [y/N] " RESP
			case $RESP in
				[yY])
					$enable_func "$just_the_name"
					break
					;;
				[nN] | "")
					break
					;;
				*)
					echo -e "\033[91mPlease choose y or n.\033[m"
					;;
			esac
		done
	done
}
# Back up existing profile
# Copies $HOME/$CONFIG_FILE to $CONFIG_FILE.bak if it exists and is
# writable; silently does nothing otherwise.
function _bash-it_backup() {
	test -w "$HOME/$CONFIG_FILE" \
		&& cp -aL "$HOME/$CONFIG_FILE" "$HOME/$CONFIG_FILE.bak" \
		&& echo -e "\033[0;32mYour original $CONFIG_FILE has been backed up to $CONFIG_FILE.bak\033[0m"
}
# Back up existing profile and create new one for bash-it
# Replaces the config file with the bash-it template (with the install
# path substituted in).
function _bash-it_backup_new() {
	_bash-it_backup
	sed "s|{{BASH_IT}}|$BASH_IT|" "$BASH_IT/template/bash_profile.template.bash" > "$HOME/$CONFIG_FILE"
	echo -e "\033[0;32mCopied the template $CONFIG_FILE into ~/$CONFIG_FILE, edit this file to customize bash-it\033[0m"
}
# Back up existing profile and append bash-it templates at the end
# Appends the template (minus its shebang line, hence tail -n +2) to the
# user's existing config file.
function _bash-it_backup_append() {
	_bash-it_backup
	(sed "s|{{BASH_IT}}|$BASH_IT|" "$BASH_IT/template/bash_profile.template.bash" | tail -n +2) >> "$HOME/$CONFIG_FILE"
	echo -e "\033[0;32mBash-it template has been added to your $CONFIG_FILE\033[0m"
}
function _bash-it_check_for_backup() {
if ! [[ -e "$HOME/$BACKUP_FILE" ]]; then
return
fi
echo -e "\033[0;33mBackup file already exists. Make sure to backup your .bashrc before running this installation.\033[0m" >&2
if ! [[ $overwrite_backup ]]; then
while ! [[ $silent ]]; do
read -e -n 1 -r -p "Would you like to overwrite the existing backup? This will delete your existing backup file ($HOME/$BACKUP_FILE) [y/N] " RESP
case $RESP in
[yY])
overwrite_backup=true
break
;;
[nN] | "")
break
;;
*)
echo -e "\033[91mPlease choose y or n.\033[m"
;;
esac
done
fi
if ! [[ $overwrite_backup ]]; then
echo -e "\033[91mInstallation aborted. Please come back soon!\033[m"
if [[ $silent ]]; then
echo -e "\033[91mUse \"-f\" flag to force overwrite of backup.\033[m"
fi
exit 1
else
echo -e "\033[0;32mOverwriting backup...\033[m"
fi
}
# Decide how to rewrite the user's $CONFIG_FILE: either append the bash-it
# template to the existing file or replace the file with a fresh template.
# Always checks/creates a backup first; prompts unless $silent or
# $append_to_config is already set.
function _bash-it_modify_config_files() {
	_bash-it_check_for_backup
	if ! [[ $silent ]]; then
		while ! [[ $append_to_config ]]; do
			read -e -n 1 -r -p "Would you like to keep your $CONFIG_FILE and append bash-it templates at the end? [y/N] " choice
			case $choice in
				[yY])
					append_to_config=true
					break
					;;
				[nN] | "")
					# Default: replace the config file with the template.
					break
					;;
				*)
					echo -e "\033[91mPlease choose y or n.\033[m"
					;;
			esac
		done
	fi
	if [[ $append_to_config ]]; then
		# backup/append
		_bash-it_backup_append
	else
		# backup/new by default
		_bash-it_backup_new
	fi
}
# Translate long options into their short equivalents so a single getopts
# pass below can handle both forms.
for param in "$@"; do
	shift
	case "$param" in
		"--help") set -- "$@" "-h" ;;
		"--silent") set -- "$@" "-s" ;;
		"--interactive") set -- "$@" "-i" ;;
		"--no-modify-config") set -- "$@" "-n" ;;
		"--append-to-config") set -- "$@" "-a" ;;
		"--overwrite-backup") set -- "$@" "-f" ;;
		*) set -- "$@" "$param" ;;
	esac
done
# Parse the (now short-only) flags into the mode variables used below.
OPTIND=1
while getopts "hsinaf" opt; do
	case "$opt" in
		"h")
			_bash-it_show_usage
			exit 0
			;;
		"s") silent=true ;;
		"i") interactive=true ;;
		"n") no_modify_config=true ;;
		"a") append_to_config=true ;;
		"f") overwrite_backup=true ;;
		"?")
			# getopts reports any unknown flag as '?'
			_bash-it_show_usage >&2
			exit 1
			;;
	esac
done
shift $(("$OPTIND" - 1))
# Reject contradictory flag combinations up front.
if [[ $silent ]] && [[ $interactive ]]; then
	echo -e "\033[91mOptions --silent and --interactive are mutually exclusive. Please choose one or the other.\033[m"
	exit 1
fi
if [[ $no_modify_config ]] && [[ $append_to_config ]]; then
	echo -e "\033[91mOptions --no-modify-config and --append-to-config are mutually exclusive. Please choose one or the other.\033[m"
	exit 1
fi
# Resolve the bash-it checkout directory from this script's own location.
BASH_IT="$(cd "$(dirname "$0")" && pwd)"
# macOS login shells read .bash_profile; other platforms use .bashrc.
case $OSTYPE in
	darwin*)
		CONFIG_FILE=.bash_profile
		;;
	*)
		CONFIG_FILE=.bashrc
		;;
esac
BACKUP_FILE=$CONFIG_FILE.bak
echo "Installing bash-it"
if ! [[ $no_modify_config ]]; then
	_bash-it_modify_config_files
fi
# Disable auto-reload in case its enabled
export BASH_IT_AUTOMATIC_RELOAD_AFTER_CONFIG_CHANGE=''
# Load dependencies for enabling components
# shellcheck disable=SC1090
source "${BASH_IT}"/vendor/github.com/erichs/composure/composure.sh
# shellcheck source=./lib/utilities.bash
source "$BASH_IT/lib/utilities.bash"
# Register composure metadata keywords used by bash-it components.
cite _about _param _example _group _author _version
# shellcheck source=./lib/helpers.bash
source "$BASH_IT/lib/helpers.bash"
# Interactive installs let the user pick components one by one; otherwise
# a curated default set of completions/plugins/aliases is enabled.
if [[ $interactive ]] && ! [[ $silent ]]; then
	for type in "aliases" "plugins" "completion"; do
		echo -e "\033[0;32mEnabling $type\033[0m"
		_bash-it_load_some $type
	done
else
	echo ""
	echo -e "\033[0;32mEnabling reasonable defaults\033[0m"
	_enable-completion bash-it
	_enable-completion system
	_enable-plugin base
	_enable-plugin alias-completion
	_enable-alias general
fi
echo ""
echo -e "\033[0;32mInstallation finished successfully! Enjoy bash-it!\033[0m"
# shellcheck disable=SC2086
echo -e "\033[0;32mTo start using it, open a new tab or 'source "$HOME/$CONFIG_FILE"'.\033[0m"
echo ""
echo "To show the available aliases/completions/plugins, type one of the following:"
echo " bash-it show aliases"
echo " bash-it show completions"
echo " bash-it show plugins"
echo ""
echo "To avoid issues and to keep your shell lean, please enable only features you really want to use."
echo "Enabling everything can lead to issues."
|
<reponame>Ajaybabunakkala/SampleApp
/*
* Copyright 2017 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.chrishantha.sample.latencies;
import com.beust.jcommander.Parameter;
import com.github.chrishantha.sample.base.SampleApplication;
public class LatenciesApplication implements SampleApplication {

    @Parameter(names = "--count", description = "Print Count")
    private int count = 50;

    /** Prints every even number in [0, count) from its own thread. */
    private class EvenThread extends Thread {

        public EvenThread() {
            super("Even-Thread");
        }

        @Override
        public void run() {
            for (int i = 0; i < count; i++) {
                if (!isEven(i)) {
                    continue;
                }
                printNumber(i);
            }
        }
    }

    /** Prints every odd number in [0, count) from its own thread. */
    private class OddThread extends Thread {

        public OddThread() {
            super("Odd-Thread");
        }

        @Override
        public void run() {
            for (int i = 0; i < count; i++) {
                if (isEven(i)) {
                    continue;
                }
                printNumber(i);
            }
        }
    }

    /** Prints the calling thread's name alongside the number. */
    private void printNumber(int i) {
        System.out.format("Thread: %s, Number: %d%n", Thread.currentThread().getName(), i);
    }

    // Synchronized on this application instance, so the even and odd threads
    // serialize their checks; the sleep makes the resulting latency visible.
    private synchronized boolean isEven(int i) {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        return i % 2 == 0;
    }

    @Override
    public void start() {
        new EvenThread().start();
        new OddThread().start();
    }

    @Override
    public String toString() {
        return "LatenciesApplication [count=" + count + "]";
    }
}
|
<reponame>lananh265/social-network<gh_stars>0
"use strict";

// Auto-generated icon definition (Material "wheelchair pickup", two-tone
// variant): a viewBox string plus a nested element tree consumed by an
// icon renderer. NOTE(review): the duplicated nested rect/path children
// look like generator output — confirm before hand-editing this data.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ic_wheelchair_pickup_twotone = void 0;
var ic_wheelchair_pickup_twotone = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "g",
    "attribs": {},
    "children": [{
      "name": "rect",
      "attribs": {
        "fill": "none",
        "height": "24",
        "width": "24",
        "x": "0"
      },
      "children": [{
        "name": "rect",
        "attribs": {
          "fill": "none",
          "height": "24",
          "width": "24",
          "x": "0"
        },
        "children": []
      }]
    }, {
      "name": "path",
      "attribs": {
        "d": "M4.5,4c0-1.11,0.89-2,2-2s2,0.89,2,2s-0.89,2-2,2S4.5,5.11,4.5,4z M10,10.95V9c0-1.1-0.9-2-2-2H5C3.9,7,3,7.9,3,9v6h2v7 h3.5v-0.11c-1.24-1.26-2-2.99-2-4.89C6.5,14.42,7.91,12.16,10,10.95z M16.5,17c0,1.65-1.35,3-3,3s-3-1.35-3-3 c0-1.11,0.61-2.06,1.5-2.58v-2.16C9.98,12.9,8.5,14.77,8.5,17c0,2.76,2.24,5,5,5s5-2.24,5-5H16.5z M19.54,14H15V8h-2v8h5.46 l2.47,3.71l1.66-1.11L19.54,14z"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M4.5,4c0-1.11,0.89-2,2-2s2,0.89,2,2s-0.89,2-2,2S4.5,5.11,4.5,4z M10,10.95V9c0-1.1-0.9-2-2-2H5C3.9,7,3,7.9,3,9v6h2v7 h3.5v-0.11c-1.24-1.26-2-2.99-2-4.89C6.5,14.42,7.91,12.16,10,10.95z M16.5,17c0,1.65-1.35,3-3,3s-3-1.35-3-3 c0-1.11,0.61-2.06,1.5-2.58v-2.16C9.98,12.9,8.5,14.77,8.5,17c0,2.76,2.24,5,5,5s5-2.24,5-5H16.5z M19.54,14H15V8h-2v8h5.46 l2.47,3.71l1.66-1.11L19.54,14z"
        },
        "children": []
      }]
    }]
  }]
};
exports.ic_wheelchair_pickup_twotone = ic_wheelchair_pickup_twotone;
/**
* Youtube tracker
* A Chrome extension to track Youtube Time
* @copyright 2020 <NAME>, <NAME>, <NAME>, <NAME>
* @author <NAME> luke-zhang-04.github.io/
* @license ISC
*/
import DeStagnate, {createElement} from "destagnate"
import DatePlus from "@luke-zhang-04/dateplus"
interface DashState {
    // Accumulated YouTube time in milliseconds (it is fed to
    // DatePlus.msToHours below); undefined until the first read from
    // chrome.storage completes.
    ytTime?: number,
}
/**
 * Dashboard component that displays the user's accumulated YouTube time
 * and offers a button to reset it. The time value lives in
 * chrome.storage.sync and is mirrored into component state.
 */
class Dashboard extends DeStagnate<undefined, DashState> {

    public constructor (parent: HTMLElement) {
        super(parent)

        this.state = {
            // Populated asynchronously in componentDidMount.
            ytTime: undefined,
        }
    }

    // Load the stored time once on mount, then refresh whenever any
    // chrome.storage value changes.
    public componentDidMount = (): void => {
        this._setTime()

        chrome.storage.onChanged.addListener(() => {
            this._setTime()
        })
    }

    /**
     * Render function
     * @returns rendered content
     */
    public render = (): HTMLElement => (
        createElement("div", {class: "container"},
            createElement("p", null,
                this._getYtTime(),
                createElement(
                    "button",
                    {
                        class: "btn btn-primary",
                        onClick: this._reset
                    },
                    "Reset",
                )
            )
        )
    )

    /**
     * Get time from storage and copy it into component state.
     * @returns void
     */
    private _setTime = (): void => chrome.storage.sync.get((items) => {
        this.setState({ytTime: Number(items.ytTime)})
    })

    /**
     * Reset storage values after user confirmation.
     */
    private _reset = (): void => {
        if (confirm("Are you sure you want to reset your time?")) {
            chrome.storage.sync.set({
                ytTime: 0,
                // NOTE(review): stores only day-of-month — presumably used
                // elsewhere for daily rollover; confirm against the tracker.
                lastUsed: new Date().getDate()
            })
        }
    }

    // Format the stored time for display; shows a loading placeholder
    // until storage has been read at least once.
    private _getYtTime = (): HTMLElement => {
        if (this.state.ytTime === undefined) {
            return createElement("p", null, "Loading...")
        }

        const time = DatePlus.msToHours(this.state.ytTime)

        return createElement("p", {class: "mt-3"},
            `You've spent a total of ${time.hours} hours, ${time.minutes} minutes, and ${time.seconds} seconds on YouTube today.`
        )
    }
}
// Mount the dashboard onto #root when the host page provides it.
const root = document.getElementById("root")

if (root) {
    const dash = new Dashboard(root)

    dash.mount()
}
|
// Computes the arithmetic mean of a fixed list of numbers and prints it
// with two decimal places, followed by the element count.
const vetor = [5, 6, 9, 4, 2, 5, 1, 3, 312, 21, 15, 11, 54, 51, 22, 21, 2, 1]
const divisor = vetor.length
// Sum via reduce instead of a manual index loop; the original
// `for (i = 0; ...)` omitted `let` and leaked `i` as an implicit global
// (a ReferenceError in strict mode).
const vetorSomado = vetor.reduce((acc, n) => acc + n, 0)
const mediaArit = vetorSomado / divisor
console.log(mediaArit.toFixed(2))
console.log(divisor)
|
<filename>src/app/model/testing/index.ts
// Barrel module: re-exports every testing double in this directory so
// consumers can import them from a single path.
export * from './test-album';
export * from './test-discography';
export * from './test-sidenav.service';
export * from './test-song';
export * from './test-title.service';
|
package com.atlassian.performance.tools.referencejiraapp.scenario;
import com.atlassian.performance.tools.jiraactions.api.SeededRandom;
import com.atlassian.performance.tools.jiraactions.api.WebJira;
import com.atlassian.performance.tools.jiraactions.api.action.Action;
import com.atlassian.performance.tools.jiraactions.api.action.BrowseProjectsAction;
import com.atlassian.performance.tools.jiraactions.api.action.CreateIssueAction;
import com.atlassian.performance.tools.jiraactions.api.action.SearchJqlAction;
import com.atlassian.performance.tools.jiraactions.api.measure.ActionMeter;
import com.atlassian.performance.tools.jiraactions.api.memories.IssueKeyMemory;
import com.atlassian.performance.tools.jiraactions.api.memories.adaptive.AdaptiveIssueKeyMemory;
import com.atlassian.performance.tools.jiraactions.api.memories.adaptive.AdaptiveJqlMemory;
import com.atlassian.performance.tools.jiraactions.api.memories.adaptive.AdaptiveProjectMemory;
import com.atlassian.performance.tools.jiraactions.api.scenario.Scenario;
import com.google.common.collect.ImmutableList;
import org.jetbrains.annotations.NotNull;
import java.util.List;
public class MyScenario implements Scenario {
@NotNull
@Override
public List<Action> getActions(WebJira webJira, SeededRandom seededRandom, ActionMeter actionMeter) {
final AdaptiveJqlMemory jqlMemory = new AdaptiveJqlMemory(seededRandom);
final IssueKeyMemory issueKeyMemory = new AdaptiveIssueKeyMemory(seededRandom);
final AdaptiveProjectMemory adaptiveProjectMemory = new AdaptiveProjectMemory(seededRandom);
return ImmutableList.of(
new SearchJqlAction(webJira, actionMeter, jqlMemory, issueKeyMemory),
new BrowseProjectsAction(webJira, actionMeter, adaptiveProjectMemory),
new CreateIssueAction(webJira, actionMeter, adaptiveProjectMemory, seededRandom),
new CreateIssueAction(webJira, actionMeter, adaptiveProjectMemory, seededRandom),
new CreateIssueAction(webJira, actionMeter, adaptiveProjectMemory, seededRandom),
new CreateIssueAction(webJira, actionMeter, adaptiveProjectMemory, seededRandom),
new CreateIssueAction(webJira, actionMeter, adaptiveProjectMemory, seededRandom),
new CreateIssueAction(webJira, actionMeter, adaptiveProjectMemory, seededRandom),
new CreateIssueAction(webJira, actionMeter, adaptiveProjectMemory, seededRandom),
new CreateIssueAction(webJira, actionMeter, adaptiveProjectMemory, seededRandom),
new CustomViewIssueAction(webJira, actionMeter, issueKeyMemory)
);
}
}
|
import collections
import itertools
import sqlite3
import typing
from hydrus.core import HydrusConstants as HC
from hydrus.core import HydrusData
from hydrus.client.db import ClientDBMappingsCounts
from hydrus.client.db import ClientDBMappingsCountsUpdate
from hydrus.client.db import ClientDBMappingsStorage
from hydrus.client.db import ClientDBModule
from hydrus.client.db import ClientDBServices
from hydrus.client.db import ClientDBTagDisplay
from hydrus.client.metadata import ClientTags
def GenerateSpecificDisplayMappingsCacheTableNames( file_service_id, tag_service_id ):
    """Return the ( current, pending ) display-mappings cache table names for a ( file service, tag service ) pair."""
    
    suffix = '{}_{}'.format( file_service_id, tag_service_id )
    
    current_table_name = 'external_caches.specific_display_current_mappings_cache_{}'.format( suffix )
    pending_table_name = 'external_caches.specific_display_pending_mappings_cache_{}'.format( suffix )
    
    return ( current_table_name, pending_table_name )
class ClientDBMappingsCacheSpecificDisplay( ClientDBModule.ClientDBModule ):
CAN_REPOPULATE_ALL_MISSING_DATA = True
def __init__( self, cursor: sqlite3.Cursor, modules_services: ClientDBServices.ClientDBMasterServices, modules_mappings_counts: ClientDBMappingsCounts.ClientDBMappingsCounts, modules_mappings_counts_update: ClientDBMappingsCountsUpdate.ClientDBMappingsCountsUpdate, modules_mappings_storage: ClientDBMappingsStorage.ClientDBMappingsStorage, modules_tag_display: ClientDBTagDisplay.ClientDBTagDisplay ):
    """Store references to the collaborating db modules and initialise the base module with the shared cursor."""
    
    self.modules_services = modules_services
    self.modules_mappings_counts = modules_mappings_counts
    self.modules_mappings_counts_update = modules_mappings_counts_update
    self.modules_mappings_storage = modules_mappings_storage
    self.modules_tag_display = modules_tag_display
    
    # Base-class init last, after collaborators are in place.
    ClientDBModule.ClientDBModule.__init__( self, 'client specific display mappings cache', cursor )
def _GetServiceIndexGenerationDictSingle( self, file_service_id, tag_service_id ):
    """Return { table name : index definitions } for one ( file service, tag service ) pair's display cache tables."""
    
    ( current_table_name, pending_table_name ) = GenerateSpecificDisplayMappingsCacheTableNames( file_service_id, tag_service_id )
    
    # Both tables get the same reverse ( tag_id, hash_id ) unique index,
    # introduced at db version 400. Separate list literals keep the two
    # entries independent.
    return {
        current_table_name : [ ( [ 'tag_id', 'hash_id' ], True, 400 ) ],
        pending_table_name : [ ( [ 'tag_id', 'hash_id' ], True, 400 ) ]
    }
def _GetServiceIndexGenerationDict( self, service_id ) -> dict:
    """Aggregate index definitions across every file service that keeps a specific mapping cache for this tag service."""
    
    tag_service_id = service_id
    
    combined_index_dict = {}
    
    for file_service_id in list( self.modules_services.GetServiceIds( HC.FILE_SERVICES_WITH_SPECIFIC_MAPPING_CACHES ) ):
        
        combined_index_dict.update( self._GetServiceIndexGenerationDictSingle( file_service_id, tag_service_id ) )
        
    
    return combined_index_dict
def _GetServiceTableGenerationDictSingle( self, file_service_id, tag_service_id ):
    """Return { table name : ( CREATE statement template, version added ) } for one service pair's display cache tables."""
    
    ( current_table_name, pending_table_name ) = GenerateSpecificDisplayMappingsCacheTableNames( file_service_id, tag_service_id )
    
    # Same schema for both tables: composite PK, no rowid, added at v400.
    create_statement = 'CREATE TABLE IF NOT EXISTS {} ( hash_id INTEGER, tag_id INTEGER, PRIMARY KEY ( hash_id, tag_id ) ) WITHOUT ROWID;'
    version = 400
    
    return {
        current_table_name : ( create_statement, version ),
        pending_table_name : ( create_statement, version )
    }
def _GetServiceTableGenerationDict( self, service_id ) -> dict:
    """Aggregate table definitions across every file service that keeps a specific mapping cache for this tag service."""
    
    tag_service_id = service_id
    
    combined_table_dict = {}
    
    for file_service_id in list( self.modules_services.GetServiceIds( HC.FILE_SERVICES_WITH_SPECIFIC_MAPPING_CACHES ) ):
        
        combined_table_dict.update( self._GetServiceTableGenerationDictSingle( file_service_id, tag_service_id ) )
        
    
    return combined_table_dict
def _GetServiceIdsWeGenerateDynamicTablesFor( self ):
    """Return the tag service ids this module generates per-service cache tables for (all real tag services)."""
    
    return self.modules_services.GetServiceIds( HC.REAL_TAG_SERVICES )
def AddFiles( self, file_service_id, tag_service_id, hash_ids, hash_ids_table_name ):
    """Populate display cache rows for files newly added to this file service.
    
    Reads the storage-level current/pending mappings for the given hash_ids
    (via the temp table hash_ids_table_name), expands them through the
    actual tag-display implications, inserts the resulting display rows,
    and pushes the per-tag count deltas to the counts cache.
    """
    
    ( cache_display_current_mappings_table_name, cache_display_pending_mappings_table_name ) = GenerateSpecificDisplayMappingsCacheTableNames( file_service_id, tag_service_id )
    ( cache_current_mappings_table_name, cache_deleted_mappings_table_name, cache_pending_mappings_table_name ) = ClientDBMappingsStorage.GenerateSpecificMappingsCacheTableNames( file_service_id, tag_service_id )
    
    # temp hashes to mappings
    storage_current_mapping_ids_raw = self._Execute( 'SELECT tag_id, hash_id FROM {} CROSS JOIN {} USING ( hash_id );'.format( hash_ids_table_name, cache_current_mappings_table_name ) ).fetchall()
    
    storage_current_mapping_ids_dict = HydrusData.BuildKeyToSetDict( storage_current_mapping_ids_raw )
    
    # temp hashes to mappings
    storage_pending_mapping_ids_raw = self._Execute( 'SELECT tag_id, hash_id FROM {} CROSS JOIN {} USING ( hash_id );'.format( hash_ids_table_name, cache_pending_mappings_table_name ) ).fetchall()
    
    storage_pending_mapping_ids_dict = HydrusData.BuildKeyToSetDict( storage_pending_mapping_ids_raw )
    
    all_storage_tag_ids = set( storage_current_mapping_ids_dict.keys() )
    all_storage_tag_ids.update( storage_pending_mapping_ids_dict.keys() )
    
    storage_tag_ids_to_implies_tag_ids = self.modules_tag_display.GetTagsToImplies( ClientTags.TAG_DISPLAY_ACTUAL, tag_service_id, all_storage_tag_ids )
    
    # invert the implies mapping so we can work per display tag below
    display_tag_ids_to_implied_by_tag_ids = collections.defaultdict( set )
    
    for ( storage_tag_id, implies_tag_ids ) in storage_tag_ids_to_implies_tag_ids.items():
        
        for implies_tag_id in implies_tag_ids:
            
            display_tag_ids_to_implied_by_tag_ids[ implies_tag_id ].add( storage_tag_id )
        
    
    counts_cache_changes = []
    
    # for all display tags implied by the existing storage mappings, add them
    # btw, when we add files to a specific domain, we know that all inserts are new
    for ( display_tag_id, implied_by_tag_ids ) in display_tag_ids_to_implied_by_tag_ids.items():
        
        # union of the hash sets of every storage tag that implies this display tag
        display_current_hash_ids = set( itertools.chain.from_iterable( ( storage_current_mapping_ids_dict[ implied_by_tag_id ] for implied_by_tag_id in implied_by_tag_ids ) ) )
        
        current_delta = len( display_current_hash_ids )
        
        if current_delta > 0:
            
            self._ExecuteMany( 'INSERT OR IGNORE INTO ' + cache_display_current_mappings_table_name + ' ( hash_id, tag_id ) VALUES ( ?, ? );', ( ( hash_id, display_tag_id ) for hash_id in display_current_hash_ids ) )
            
        
        #
        
        display_pending_hash_ids = set( itertools.chain.from_iterable( ( storage_pending_mapping_ids_dict[ implied_by_tag_id ] for implied_by_tag_id in implied_by_tag_ids ) ) )
        
        pending_delta = len( display_pending_hash_ids )
        
        if pending_delta > 0:
            
            self._ExecuteMany( 'INSERT OR IGNORE INTO ' + cache_display_pending_mappings_table_name + ' ( hash_id, tag_id ) VALUES ( ?, ? );', ( ( hash_id, display_tag_id ) for hash_id in display_pending_hash_ids ) )
            
        
        #
        
        if current_delta > 0 or pending_delta > 0:
            
            counts_cache_changes.append( ( display_tag_id, current_delta, pending_delta ) )
            
        
    
    if len( counts_cache_changes ) > 0:
        
        self.modules_mappings_counts_update.AddCounts( ClientTags.TAG_DISPLAY_ACTUAL, file_service_id, tag_service_id, counts_cache_changes )
def AddImplications( self, file_service_id, tag_service_id, implication_tag_ids, tag_id, status_hook = None ):
    """Add display rows for tag_id for every file that has a storage mapping to any of implication_tag_ids.
    
    Used when new implications (e.g. siblings/parents) start pointing at
    tag_id. status_hook is accepted for interface parity but unused here.
    """
    
    if len( implication_tag_ids ) == 0:
        
        return
        
    
    ( cache_current_mappings_table_name, cache_deleted_mappings_table_name, cache_pending_mappings_table_name ) = ClientDBMappingsStorage.GenerateSpecificMappingsCacheTableNames( file_service_id, tag_service_id )
    ( cache_display_current_mappings_table_name, cache_display_pending_mappings_table_name ) = GenerateSpecificDisplayMappingsCacheTableNames( file_service_id, tag_service_id )
    
    statuses_to_count_delta = collections.Counter()
    
    # weights let us skip statuses with no storage rows at all
    ( current_implication_tag_ids, current_implication_tag_ids_weight, pending_implication_tag_ids, pending_implication_tag_ids_weight ) = self.modules_mappings_counts.GetCurrentPendingPositiveCountsAndWeights( ClientTags.TAG_DISPLAY_STORAGE, file_service_id, tag_service_id, implication_tag_ids )
    
    jobs = []
    
    jobs.append( ( HC.CONTENT_STATUS_CURRENT, cache_display_current_mappings_table_name, cache_current_mappings_table_name, current_implication_tag_ids, current_implication_tag_ids_weight ) )
    jobs.append( ( HC.CONTENT_STATUS_PENDING, cache_display_pending_mappings_table_name, cache_pending_mappings_table_name, pending_implication_tag_ids, pending_implication_tag_ids_weight ) )
    
    for ( status, cache_display_mappings_table_name, cache_mappings_table_name, add_tag_ids, add_tag_ids_weight ) in jobs:
        
        if add_tag_ids_weight == 0:
            
            # nothing to actually add, so nbd
            
            continue
            
        
        if len( add_tag_ids ) == 1:
            
            # single-tag fast path: no temp table needed
            ( add_tag_id, ) = add_tag_ids
            
            self._Execute( 'INSERT OR IGNORE INTO {} ( hash_id, tag_id ) SELECT hash_id, ? FROM {} WHERE tag_id = ?;'.format( cache_display_mappings_table_name, cache_mappings_table_name ), ( tag_id, add_tag_id ) )
            
            statuses_to_count_delta[ status ] = self._GetRowCount()
            
        else:
            
            with self._MakeTemporaryIntegerTable( add_tag_ids, 'tag_id' ) as temp_tag_ids_table_name:
                
                # for all new implications, get files with those tags and not existing
                
                self._Execute( 'INSERT OR IGNORE INTO {} ( hash_id, tag_id ) SELECT hash_id, ? FROM {} CROSS JOIN {} USING ( tag_id );'.format( cache_display_mappings_table_name, temp_tag_ids_table_name, cache_mappings_table_name ), ( tag_id, ) )
                
                statuses_to_count_delta[ status ] = self._GetRowCount()
                
            
        
    
    current_delta = statuses_to_count_delta[ HC.CONTENT_STATUS_CURRENT ]
    pending_delta = statuses_to_count_delta[ HC.CONTENT_STATUS_PENDING ]
    
    if current_delta > 0 or pending_delta > 0:
        
        counts_cache_changes = ( ( tag_id, current_delta, pending_delta ), )
        
        self.modules_mappings_counts_update.AddCounts( ClientTags.TAG_DISPLAY_ACTUAL, file_service_id, tag_service_id, counts_cache_changes )
def AddMappings( self, file_service_id, tag_service_id, tag_id, hash_ids ):
    """Add current display mappings for tag_id, and every tag it implies, on the given hash_ids."""
    
    # this guy doesn't do rescind pend because of storage calculation issues that need that to occur before deletes to storage tables
    
    ( cache_display_current_mappings_table_name, cache_display_pending_mappings_table_name ) = GenerateSpecificDisplayMappingsCacheTableNames( file_service_id, tag_service_id )
    
    display_tag_ids = self.modules_tag_display.GetImplies( ClientTags.TAG_DISPLAY_ACTUAL, tag_service_id, tag_id )
    
    ac_counts = collections.Counter()
    
    for display_tag_id in display_tag_ids:
        
        self._ExecuteMany( 'INSERT OR IGNORE INTO ' + cache_display_current_mappings_table_name + ' ( hash_id, tag_id ) VALUES ( ?, ? );', ( ( hash_id, display_tag_id ) for hash_id in hash_ids ) )
        
        # OR IGNORE means the row count reflects genuinely new rows only
        num_added = self._GetRowCount()
        
        if num_added > 0:
            
            ac_counts[ display_tag_id ] += num_added
            
        
    
    if len( ac_counts ) > 0:
        
        # note: tag_id here deliberately rebinds as the comprehension variable
        counts_cache_changes = [ ( tag_id, current_delta, 0 ) for ( tag_id, current_delta ) in ac_counts.items() ]
        
        self.modules_mappings_counts_update.AddCounts( ClientTags.TAG_DISPLAY_ACTUAL, file_service_id, tag_service_id, counts_cache_changes )
def Clear( self, file_service_id, tag_service_id, keep_pending = False ):
    """Empty the display mapping cache tables (optionally preserving pending rows) and clear the matching counts."""
    
    ( current_table_name, pending_table_name ) = GenerateSpecificDisplayMappingsCacheTableNames( file_service_id, tag_service_id )
    
    self._Execute( 'DELETE FROM {};'.format( current_table_name ) )
    
    if not keep_pending:
        
        self._Execute( 'DELETE FROM {};'.format( pending_table_name ) )
        
    
    self.modules_mappings_counts.ClearCounts( ClientTags.TAG_DISPLAY_ACTUAL, file_service_id, tag_service_id, keep_pending = keep_pending )
def Drop( self, file_service_id, tag_service_id ):
    """Drop both display mapping cache tables for this service pair and their counts tables."""
    
    ( current_table_name, pending_table_name ) = GenerateSpecificDisplayMappingsCacheTableNames( file_service_id, tag_service_id )
    
    for table_name in ( current_table_name, pending_table_name ):
        
        self._Execute( 'DROP TABLE IF EXISTS {};'.format( table_name ) )
        
    
    self.modules_mappings_counts.DropTables( ClientTags.TAG_DISPLAY_ACTUAL, file_service_id, tag_service_id )
def DeleteFiles( self, file_service_id, tag_service_id, hash_ids, hash_id_table_name ):
    """Remove all display cache rows for files leaving this file service and reduce the counts accordingly."""
    
    ( cache_display_current_mappings_table_name, cache_display_pending_mappings_table_name ) = GenerateSpecificDisplayMappingsCacheTableNames( file_service_id, tag_service_id )
    
    # temp hashes to mappings
    current_mapping_ids_raw = self._Execute( 'SELECT tag_id, hash_id FROM {} CROSS JOIN {} USING ( hash_id );'.format( hash_id_table_name, cache_display_current_mappings_table_name ) ).fetchall()
    
    current_mapping_ids_dict = HydrusData.BuildKeyToSetDict( current_mapping_ids_raw )
    
    # temp hashes to mappings
    pending_mapping_ids_raw = self._Execute( 'SELECT tag_id, hash_id FROM {} CROSS JOIN {} USING ( hash_id );'.format( hash_id_table_name, cache_display_pending_mappings_table_name ) ).fetchall()
    
    pending_mapping_ids_dict = HydrusData.BuildKeyToSetDict( pending_mapping_ids_raw )
    
    all_ids_seen = set( current_mapping_ids_dict.keys() )
    all_ids_seen.update( pending_mapping_ids_dict.keys() )
    
    counts_cache_changes = []
    
    # tally the per-tag losses before the rows are actually deleted
    for tag_id in all_ids_seen:
        
        current_hash_ids = current_mapping_ids_dict[ tag_id ]
        
        num_current = len( current_hash_ids )
        
        #
        
        pending_hash_ids = pending_mapping_ids_dict[ tag_id ]
        
        num_pending = len( pending_hash_ids )
        
        counts_cache_changes.append( ( tag_id, num_current, num_pending ) )
        
    
    self._ExecuteMany( 'DELETE FROM ' + cache_display_current_mappings_table_name + ' WHERE hash_id = ?;', ( ( hash_id, ) for hash_id in hash_ids ) )
    self._ExecuteMany( 'DELETE FROM ' + cache_display_pending_mappings_table_name + ' WHERE hash_id = ?;', ( ( hash_id, ) for hash_id in hash_ids ) )
    
    if len( counts_cache_changes ) > 0:
        
        self.modules_mappings_counts_update.ReduceCounts( ClientTags.TAG_DISPLAY_ACTUAL, file_service_id, tag_service_id, counts_cache_changes )
def DeleteImplications( self, file_service_id, tag_service_id, implication_tag_ids, tag_id, status_hook = None ):
    """Remove display rows for tag_id that were only justified by the now-removed implication_tag_ids.
    
    A display row survives if any remaining implication still provides it.
    status_hook is accepted for interface parity but unused here.
    """
    
    if len( implication_tag_ids ) == 0:
        
        return
        
    
    statuses_to_count_delta = collections.Counter()
    
    ( cache_current_mappings_table_name, cache_deleted_mappings_table_name, cache_pending_mappings_table_name ) = ClientDBMappingsStorage.GenerateSpecificMappingsCacheTableNames( file_service_id, tag_service_id )
    ( cache_display_current_mappings_table_name, cache_display_pending_mappings_table_name ) = GenerateSpecificDisplayMappingsCacheTableNames( file_service_id, tag_service_id )
    
    # implications that still point at tag_id after this removal
    remaining_implication_tag_ids = set( self.modules_tag_display.GetImpliedBy( ClientTags.TAG_DISPLAY_ACTUAL, tag_service_id, tag_id ) ).difference( implication_tag_ids )
    
    ( current_implication_tag_ids, current_implication_tag_ids_weight, pending_implication_tag_ids, pending_implication_tag_ids_weight ) = self.modules_mappings_counts.GetCurrentPendingPositiveCountsAndWeights( ClientTags.TAG_DISPLAY_STORAGE, file_service_id, tag_service_id, implication_tag_ids )
    ( current_remaining_implication_tag_ids, current_remaining_implication_tag_ids_weight, pending_remaining_implication_tag_ids, pending_remaining_implication_tag_ids_weight ) = self.modules_mappings_counts.GetCurrentPendingPositiveCountsAndWeights( ClientTags.TAG_DISPLAY_STORAGE, file_service_id, tag_service_id, remaining_implication_tag_ids )
    
    jobs = []
    
    jobs.append( ( HC.CONTENT_STATUS_CURRENT, cache_display_current_mappings_table_name, cache_current_mappings_table_name, current_implication_tag_ids, current_implication_tag_ids_weight, current_remaining_implication_tag_ids, current_remaining_implication_tag_ids_weight ) )
    jobs.append( ( HC.CONTENT_STATUS_PENDING, cache_display_pending_mappings_table_name, cache_pending_mappings_table_name, pending_implication_tag_ids, pending_implication_tag_ids_weight, pending_remaining_implication_tag_ids, pending_remaining_implication_tag_ids_weight ) )
    
    for ( status, cache_display_mappings_table_name, cache_mappings_table_name, removee_tag_ids, removee_tag_ids_weight, keep_tag_ids, keep_tag_ids_weight ) in jobs:
        
        if removee_tag_ids_weight == 0:
            
            # nothing to remove, so nothing to do!
            
            continue
            
        
        # ultimately here, we are doing "delete all display mappings with hash_ids that have a storage mapping for a removee tag and no storage mappings for a keep tag
        # in order to reduce overhead, we go full meme and do a bunch of different situations
        
        with self._MakeTemporaryIntegerTable( [], 'tag_id' ) as temp_removee_tag_ids_table_name:
            
            with self._MakeTemporaryIntegerTable( [], 'tag_id' ) as temp_keep_tag_ids_table_name:
                
                if len( removee_tag_ids ) == 1:
                    
                    # single-tag fast path: inline the id, skip the temp table
                    ( removee_tag_id, ) = removee_tag_ids
                    
                    hash_id_in_storage_remove = 'hash_id IN ( SELECT hash_id FROM {} WHERE tag_id = {} )'.format( cache_mappings_table_name, removee_tag_id )
                    
                else:
                    
                    self._ExecuteMany( 'INSERT INTO {} ( tag_id ) VALUES ( ? );'.format( temp_removee_tag_ids_table_name ), ( ( removee_tag_id, ) for removee_tag_id in removee_tag_ids ) )
                    
                    hash_id_in_storage_remove = 'hash_id IN ( SELECT DISTINCT hash_id FROM {} CROSS JOIN {} USING ( tag_id ) )'.format( temp_removee_tag_ids_table_name, cache_mappings_table_name )
                    
                
                if keep_tag_ids_weight == 0:
                    
                    # no surviving implications: every matching row goes
                    predicates_phrase = hash_id_in_storage_remove
                    
                else:
                    
                    # WARNING, WARNING: Big Brain Query, potentially great/awful
                    # note that in the 'clever/file join' situation, the number of total mappings is many, but we are deleting a few
                    # we want to precisely scan the status of the potential hashes to delete, not scan through them all to see what not to do
                    # therefore, we do NOT EXISTS, which just scans the parts, rather than NOT IN, which does the whole query and then checks against all results
                    
                    if len( keep_tag_ids ) == 1:
                        
                        ( keep_tag_id, ) = keep_tag_ids
                        
                        if ClientDBMappingsStorage.DoingAFileJoinTagSearchIsFaster( removee_tag_ids_weight, keep_tag_ids_weight ):
                            
                            hash_id_not_in_storage_keep = 'NOT EXISTS ( SELECT 1 FROM {} WHERE {}.hash_id = {}.hash_id and tag_id = {} )'.format( cache_mappings_table_name, cache_display_mappings_table_name, cache_mappings_table_name, keep_tag_id )
                            
                        else:
                            
                            hash_id_not_in_storage_keep = 'hash_id NOT IN ( SELECT hash_id FROM {} WHERE tag_id = {} )'.format( cache_mappings_table_name, keep_tag_id )
                            
                        
                    else:
                        
                        self._ExecuteMany( 'INSERT INTO {} ( tag_id ) VALUES ( ? );'.format( temp_keep_tag_ids_table_name ), ( ( keep_tag_id, ) for keep_tag_id in keep_tag_ids ) )
                        
                        if ClientDBMappingsStorage.DoingAFileJoinTagSearchIsFaster( removee_tag_ids_weight, keep_tag_ids_weight ):
                            
                            # (files to) mappings to temp tags
                            hash_id_not_in_storage_keep = 'NOT EXISTS ( SELECT 1 FROM {} CROSS JOIN {} USING ( tag_id ) WHERE {}.hash_id = {}.hash_id )'.format( cache_mappings_table_name, temp_keep_tag_ids_table_name, cache_display_mappings_table_name, cache_mappings_table_name )
                            
                        else:
                            
                            # temp tags to mappings
                            hash_id_not_in_storage_keep = ' hash_id NOT IN ( SELECT DISTINCT hash_id FROM {} CROSS JOIN {} USING ( tag_id ) )'.format( temp_keep_tag_ids_table_name, cache_mappings_table_name )
                            
                        
                    
                    predicates_phrase = '{} AND {}'.format( hash_id_in_storage_remove, hash_id_not_in_storage_keep )
                    
                
                query = 'DELETE FROM {} WHERE tag_id = {} AND {};'.format( cache_display_mappings_table_name, tag_id, predicates_phrase )
                
                self._Execute( query )
                
                statuses_to_count_delta[ status ] = self._GetRowCount()
                
            
        
    
    current_delta = statuses_to_count_delta[ HC.CONTENT_STATUS_CURRENT ]
    pending_delta = statuses_to_count_delta[ HC.CONTENT_STATUS_PENDING ]
    
    if current_delta > 0 or pending_delta > 0:
        
        counts_cache_changes = ( ( tag_id, current_delta, pending_delta ), )
        
        self.modules_mappings_counts_update.ReduceCounts( ClientTags.TAG_DISPLAY_ACTUAL, file_service_id, tag_service_id, counts_cache_changes )
def DeleteMappings( self, file_service_id, tag_service_id, storage_tag_id, hash_ids ):
    """Remove current display mappings that storage_tag_id implied on hash_ids.
    
    A display row is only deleted when no other storage tag on the same
    file still implies that display tag.
    """
    
    # this guy doesn't do rescind pend because of storage calculation issues that need that to occur before deletes to storage tables
    
    ( cache_display_current_mappings_table_name, cache_display_pending_mappings_table_name ) = GenerateSpecificDisplayMappingsCacheTableNames( file_service_id, tag_service_id )
    
    implies_tag_ids = self.modules_tag_display.GetImplies( ClientTags.TAG_DISPLAY_ACTUAL, tag_service_id, storage_tag_id )
    
    implies_tag_ids_to_implied_by_tag_ids = self.modules_tag_display.GetTagsToImpliedBy( ClientTags.TAG_DISPLAY_ACTUAL, tag_service_id, implies_tag_ids, tags_are_ideal = True )
    
    ac_counts = collections.Counter()
    
    for ( display_tag_id, implied_by_tag_ids ) in implies_tag_ids_to_implied_by_tag_ids.items():
        
        # for every tag implied by the storage tag being removed
        
        other_implied_by_tag_ids = set( implied_by_tag_ids )
        other_implied_by_tag_ids.discard( storage_tag_id )
        
        if len( other_implied_by_tag_ids ) == 0:
            
            # nothing else implies this tag on display, so can just straight up delete
            
            self._ExecuteMany( 'DELETE FROM {} WHERE tag_id = ? AND hash_id = ?;'.format( cache_display_current_mappings_table_name ), ( ( display_tag_id, hash_id ) for hash_id in hash_ids ) )
            
            num_deleted = self._GetRowCount()
            
        else:
            
            # other things imply this tag on display, so we need to check storage to see what else has it
            
            statuses_to_table_names = self.modules_mappings_storage.GetFastestStorageMappingTableNames( file_service_id, tag_service_id )
            
            mappings_table_name = statuses_to_table_names[ HC.CONTENT_STATUS_CURRENT ]
            
            with self._MakeTemporaryIntegerTable( other_implied_by_tag_ids, 'tag_id' ) as temp_table_name:
                
                # delete only when no other implying storage tag covers the file
                delete = 'DELETE FROM {} WHERE tag_id = ? AND hash_id = ? AND NOT EXISTS ( SELECT 1 FROM {} CROSS JOIN {} USING ( tag_id ) WHERE hash_id = ? );'.format( cache_display_current_mappings_table_name, mappings_table_name, temp_table_name )
                
                self._ExecuteMany( delete, ( ( display_tag_id, hash_id, hash_id ) for hash_id in hash_ids ) )
                
                num_deleted = self._GetRowCount()
                
            
        
        if num_deleted > 0:
            
            ac_counts[ display_tag_id ] += num_deleted
            
        
    
    if len( ac_counts ) > 0:
        
        # note: tag_id here deliberately rebinds as the comprehension variable
        counts_cache_changes = [ ( tag_id, current_delta, 0 ) for ( tag_id, current_delta ) in ac_counts.items() ]
        
        self.modules_mappings_counts_update.ReduceCounts( ClientTags.TAG_DISPLAY_ACTUAL, file_service_id, tag_service_id, counts_cache_changes )
def Generate( self, file_service_id, tag_service_id, populate_from_storage = True, status_hook = None ):
    # Creates the specific display-mappings cache tables for this
    # ( file_service_id, tag_service_id ) pair, optionally seeding them from
    # the storage mappings cache, then builds the counts cache and indices.
    # status_hook, if given, receives short progress strings.
    table_generation_dict = self._GetServiceTableGenerationDictSingle( file_service_id, tag_service_id )
    for ( table_name, ( create_query_without_name, version_added ) ) in table_generation_dict.items():
        self._Execute( create_query_without_name.format( table_name ) )
    if populate_from_storage:
        if status_hook is not None:
            status_hook( 'copying storage' )
        ( cache_current_mappings_table_name, cache_deleted_mappings_table_name, cache_pending_mappings_table_name ) = ClientDBMappingsStorage.GenerateSpecificMappingsCacheTableNames( file_service_id, tag_service_id )
        ( cache_display_current_mappings_table_name, cache_display_pending_mappings_table_name ) = GenerateSpecificDisplayMappingsCacheTableNames( file_service_id, tag_service_id )
        # Display starts as a straight copy of storage; sibling/parent implication
        # collapse is applied by later maintenance, not here.
        self._Execute( 'INSERT OR IGNORE INTO {} ( hash_id, tag_id ) SELECT hash_id, tag_id FROM {};'.format( cache_display_current_mappings_table_name, cache_current_mappings_table_name ) )
        self._Execute( 'INSERT OR IGNORE INTO {} ( hash_id, tag_id ) SELECT hash_id, tag_id FROM {};'.format( cache_display_pending_mappings_table_name, cache_pending_mappings_table_name ) )
    self.modules_mappings_counts.CreateTables( ClientTags.TAG_DISPLAY_ACTUAL, file_service_id, tag_service_id, populate_from_storage = populate_from_storage )
    if status_hook is not None:
        status_hook( 'optimising data' )
    # Indices are created after the bulk insert so the copy runs fast.
    index_generation_dict = self._GetServiceIndexGenerationDictSingle( file_service_id, tag_service_id )
    for ( table_name, columns, unique, version_added ) in self._FlattenIndexGenerationDict( index_generation_dict ):
        self._CreateIndex( table_name, columns, unique = unique )
def GetTablesAndColumnsThatUseDefinitions( self, content_type: int ) -> typing.List[ typing.Tuple[ str, str ] ]:
    # Report which of this module's tables reference master definition ids of
    # the given content type, and under which column name. Unknown content
    # types yield an empty list.
    content_type_to_column = {
        HC.CONTENT_TYPE_TAG : 'tag_id',
        HC.CONTENT_TYPE_HASH : 'hash_id'
    }
    if content_type not in content_type_to_column:
        return []
    column_name = content_type_to_column[ content_type ]
    table_dict = self._GetServicesTableGenerationDict()
    return [ ( table_name, column_name ) for table_name in table_dict.keys() ]
def PendMappings( self, file_service_id, tag_service_id, tag_id, hash_ids ):
    # Adds pending display mappings for every tag the storage tag_id implies
    # (its ideal siblings/parents), for each of the given hash_ids, and bumps
    # the pending counts cache by the number of rows actually inserted.
    ( cache_display_current_mappings_table_name, cache_display_pending_mappings_table_name ) = GenerateSpecificDisplayMappingsCacheTableNames( file_service_id, tag_service_id )
    ac_counts = collections.Counter()
    display_tag_ids = self.modules_tag_display.GetImplies( ClientTags.TAG_DISPLAY_ACTUAL, tag_service_id, tag_id )
    for display_tag_id in display_tag_ids:
        # INSERT OR IGNORE means rows already pending for another reason are
        # not double-counted; _GetRowCount reflects only the new rows.
        self._ExecuteMany( 'INSERT OR IGNORE INTO ' + cache_display_pending_mappings_table_name + ' ( hash_id, tag_id ) VALUES ( ?, ? );', ( ( hash_id, display_tag_id ) for hash_id in hash_ids ) )
        num_added = self._GetRowCount()
        if num_added > 0:
            ac_counts[ display_tag_id ] += num_added
    if len( ac_counts ) > 0:
        counts_cache_changes = [ ( tag_id, 0, pending_delta ) for ( tag_id, pending_delta ) in ac_counts.items() ]
        self.modules_mappings_counts_update.AddCounts( ClientTags.TAG_DISPLAY_ACTUAL, file_service_id, tag_service_id, counts_cache_changes )
def RegeneratePending( self, file_service_id, tag_service_id, status_hook = None ):
    # Drops and rebuilds the pending display mappings (and their counts) from
    # the pending storage mappings, applying current tag implications.
    ( cache_current_mappings_table_name, cache_deleted_mappings_table_name, cache_pending_mappings_table_name ) = ClientDBMappingsStorage.GenerateSpecificMappingsCacheTableNames( file_service_id, tag_service_id )
    ( cache_display_current_mappings_table_name, cache_display_pending_mappings_table_name ) = GenerateSpecificDisplayMappingsCacheTableNames( file_service_id, tag_service_id )
    if status_hook is not None:
        message = 'clearing old specific display data'
        status_hook( message )
    # Work out every display tag that any pending storage tag implies.
    all_pending_storage_tag_ids = self._STS( self._Execute( 'SELECT DISTINCT tag_id FROM {};'.format( cache_pending_mappings_table_name ) ) )
    storage_tag_ids_to_display_tag_ids = self.modules_tag_display.GetTagsToImplies( ClientTags.TAG_DISPLAY_ACTUAL, tag_service_id, all_pending_storage_tag_ids )
    all_pending_display_tag_ids = set( itertools.chain.from_iterable( storage_tag_ids_to_display_tag_ids.values() ) )
    # These intermediates can be large; free them before the rebuild loop.
    del all_pending_storage_tag_ids
    del storage_tag_ids_to_display_tag_ids
    self.modules_mappings_counts.ClearCounts( ClientTags.TAG_DISPLAY_ACTUAL, file_service_id, tag_service_id, keep_current = True )
    self._Execute( 'DELETE FROM {};'.format( cache_display_pending_mappings_table_name ) )
    all_pending_display_tag_ids_to_implied_by_storage_tag_ids = self.modules_tag_display.GetTagsToImpliedBy( ClientTags.TAG_DISPLAY_ACTUAL, tag_service_id, all_pending_display_tag_ids, tags_are_ideal = True )
    counts_cache_changes = []
    num_to_do = len( all_pending_display_tag_ids_to_implied_by_storage_tag_ids )
    for ( i, ( display_tag_id, storage_tag_ids ) ) in enumerate( all_pending_display_tag_ids_to_implied_by_storage_tag_ids.items() ):
        if i % 100 == 0 and status_hook is not None:
            message = 'regenerating pending tags {}'.format( HydrusData.ConvertValueRangeToPrettyString( i + 1, num_to_do ) )
            status_hook( message )
        if len( storage_tag_ids ) == 1:
            # Single implier: a plain SELECT is cheaper than a temp-table join.
            ( storage_tag_id, ) = storage_tag_ids
            self._Execute( 'INSERT OR IGNORE INTO {} ( tag_id, hash_id ) SELECT ?, hash_id FROM {} WHERE tag_id = ?;'.format( cache_display_pending_mappings_table_name, cache_pending_mappings_table_name ), ( display_tag_id, storage_tag_id ) )
            pending_delta = self._GetRowCount()
        else:
            with self._MakeTemporaryIntegerTable( storage_tag_ids, 'tag_id' ) as temp_tag_ids_table_name:
                # temp tags to mappings merged
                self._Execute( 'INSERT OR IGNORE INTO {} ( tag_id, hash_id ) SELECT DISTINCT ?, hash_id FROM {} CROSS JOIN {} USING ( tag_id );'.format( cache_display_pending_mappings_table_name, temp_tag_ids_table_name, cache_pending_mappings_table_name ), ( display_tag_id, ) )
                pending_delta = self._GetRowCount()
        counts_cache_changes.append( ( display_tag_id, 0, pending_delta ) )
    self.modules_mappings_counts_update.AddCounts( ClientTags.TAG_DISPLAY_ACTUAL, file_service_id, tag_service_id, counts_cache_changes )
def RescindPendingMappings( self, file_service_id, tag_service_id, storage_tag_id, hash_ids ):
    # Removes pending display mappings that existed only because of the given
    # storage tag. A display row survives if any other storage tag still
    # pends it for that hash; counts are reduced by the rows actually removed.
    ( cache_display_current_mappings_table_name, cache_display_pending_mappings_table_name ) = GenerateSpecificDisplayMappingsCacheTableNames( file_service_id, tag_service_id )
    implies_tag_ids = self.modules_tag_display.GetImplies( ClientTags.TAG_DISPLAY_ACTUAL, tag_service_id, storage_tag_id )
    implies_tag_ids_to_implied_by_tag_ids = self.modules_tag_display.GetTagsToImpliedBy( ClientTags.TAG_DISPLAY_ACTUAL, tag_service_id, implies_tag_ids, tags_are_ideal = True )
    ac_counts = collections.Counter()
    for ( display_tag_id, implied_by_tag_ids ) in implies_tag_ids_to_implied_by_tag_ids.items():
        # for every tag implied by the storage tag being removed
        other_implied_by_tag_ids = set( implied_by_tag_ids )
        other_implied_by_tag_ids.discard( storage_tag_id )
        if len( other_implied_by_tag_ids ) == 0:
            # nothing else implies this tag on display, so can just straight up delete
            self._ExecuteMany( 'DELETE FROM {} WHERE tag_id = ? AND hash_id = ?;'.format( cache_display_pending_mappings_table_name ), ( ( display_tag_id, hash_id ) for hash_id in hash_ids ) )
            num_rescinded = self._GetRowCount()
        else:
            # other things imply this tag on display, so we need to check storage to see what else has it
            statuses_to_table_names = self.modules_mappings_storage.GetFastestStorageMappingTableNames( file_service_id, tag_service_id )
            mappings_table_name = statuses_to_table_names[ HC.CONTENT_STATUS_PENDING ]
            with self._MakeTemporaryIntegerTable( other_implied_by_tag_ids, 'tag_id' ) as temp_table_name:
                # storage mappings to temp other tag ids
                # delete mappings where it shouldn't exist for other reasons lad
                delete = 'DELETE FROM {} WHERE tag_id = ? AND hash_id = ? AND NOT EXISTS ( SELECT 1 FROM {} CROSS JOIN {} USING ( tag_id ) WHERE hash_id = ? )'.format( cache_display_pending_mappings_table_name, mappings_table_name, temp_table_name )
                self._ExecuteMany( delete, ( ( display_tag_id, hash_id, hash_id ) for hash_id in hash_ids ) )
                num_rescinded = self._GetRowCount()
        if num_rescinded > 0:
            ac_counts[ display_tag_id ] += num_rescinded
    if len( ac_counts ) > 0:
        counts_cache_changes = [ ( tag_id, 0, pending_delta ) for ( tag_id, pending_delta ) in ac_counts.items() ]
        self.modules_mappings_counts_update.ReduceCounts( ClientTags.TAG_DISPLAY_ACTUAL, file_service_id, tag_service_id, counts_cache_changes )
|
const { Node, humanReadableList, swapEveryTwoNodes } = require('.');
test('List 1 -> 2 -> 3 -> 4 should equal 2 -> 1 -> 4 -> 3 after swapping every two', () => {
  // Build the list tail-first so each node links to the next one: 1 -> 2 -> 3 -> 4.
  const tail = new Node(3, new Node(4));
  const head = new Node(1, new Node(2, tail));
  // Swap adjacent pairs and render the result for comparison.
  const swapped = swapEveryTwoNodes(head);
  expect(humanReadableList(swapped)).toEqual('2 -> 1 -> 4 -> 3');
});
|
#ifndef __8EB22FACEE81CD2F4E35__GAME__HPP__
#define __8EB22FACEE81CD2F4E35__GAME__HPP__
// One high-score table entry, persisted to the VMU save file.
// Fixed-size char arrays (10 chars + NUL) keep the on-disk layout stable.
typedef struct hs
{
char name[11];   // player name, NUL-terminated, max 10 visible chars
char date[11];   // date string, NUL-terminated, max 10 visible chars
int score;       // achieved score
}st_high_score;
// Persisted game options blob (saved/loaded as a unit via the VMU code).
// NOTE(review): field sizes define the save-file layout — do not reorder or
// resize without a save-format migration.
typedef struct op
{
bool autosave;     // save settings automatically on change
bool music;        // background music on/off
bool sound;        // sound effects on/off
bool rumblep1;     // rumble pack enabled for player 1
bool rumblep2;     // rumble pack enabled for player 2
bool pal;          // PAL (vs NTSC) video mode
char theme[256];   // path/name of the selected theme
char p1name[11];   // player 1 name, NUL-terminated
char p2name[11];   // player 2 name, NUL-terminated
}st_options;
#include <arch/rtc.h>
#include <time.h>
#include <stdlib.h>
#include <kos.h>
#include "../../dccommon/src/vmu.hpp"
#include "../../dccommon/src/linkedlist.hpp"
#include "player.hpp"
#include "sound/sound.hpp"
#include "highscore.hpp"
#include "theme/themeinfo.hpp"
#include "sound/music.hpp"
class CDraw;
// Central game-state singleton-style object: owns the players, theme list,
// sound/music, high scores and VMU handles, and tracks which menu/status
// screen the game is currently in. Most members are exposed through trivial
// inline getters/setters; the STATUS_*/GAME_* constants below enumerate the
// state machine's states and modes.
class CGame
{
private:
CLinkedList *m_pThemeList;
CThemeInfo *m_pCurrentThemeInfo, *m_pPreviewThemeInfo;
CBackground *m_pPreviewBackground;
CDraw *m_pDraw;
int m_iPlayers, m_iStatus, m_iMainMenuPos, m_iOptionMenuPos, m_iGameMode, m_iThemeBrowserMenuPos;
int m_iVMUMenuPos, m_iMusicVolume, m_iSoundVolume, m_iTVMenuPos;
int m_iPauseMenuPos;
bool m_bMusic, m_bSound, m_bAutoSave, m_bTVModeLoaded, m_bPal;
CPlayer *m_pPlayers[2];
CSound *m_pSound;
CMusic *m_pMusic;
CHighScore *m_pHighScore;
int m_iGameType;
CVMU *m_pVMU;
maple_device_t *m_pActiveVMU;
public:
// TV / video mode flags
bool GetTVModeLoaded(){return(m_bTVModeLoaded);};
void SetTVModeLoaded(bool modeLoaded){m_bTVModeLoaded=modeLoaded;};
void SetPal(bool pal){m_bPal=pal;};
bool GetPal(){return(m_bPal);};
// Music control (implementation in the .cpp for the non-trivial setters)
void SetPlayMusic(bool music);
bool GetPlayMusic(){return(m_bMusic);};
void SetMusic(CMusic *music);
CMusic* GetMusic(){return(m_pMusic);};
// Menu position data
void SetMainMenuPos(int pos){m_iMainMenuPos=pos;};
int GetMainMenuPos(){return(m_iMainMenuPos);};
void SetOptionMenuPos(int pos){m_iOptionMenuPos=pos;};
int GetOptionMenuPos(){return(m_iOptionMenuPos);};
void SetTVMenuPos(int pos){m_iTVMenuPos=pos;};
int GetTVMenuPos(){return(m_iTVMenuPos);};
void SetPauseMenuPos(int pos){m_iPauseMenuPos=pos;};
int GetPauseMenuPos(){return(m_iPauseMenuPos);};
void SetThemeBrowserMenuPos(int pos){m_iThemeBrowserMenuPos=pos;};
int GetThemeBrowserMenuPos(){return(m_iThemeBrowserMenuPos);};
// Theme handeling functions
void SetThemeList(CLinkedList *list){m_pThemeList=list;};
CThemeInfo* GetThemeByName(const char *name);
CThemeInfo* GetThemeByPath(const char *path);
// 0 based pos
CThemeInfo* GetThemeByPos(int pos);
CLinkedList* GetThemeList(){return(m_pThemeList);};
void SetCurrentTheme(CThemeInfo *info){m_pCurrentThemeInfo=info;};
CThemeInfo* GetCurrentTheme(){return(m_pCurrentThemeInfo);};
void SetPreviewTheme(CThemeInfo *info){m_pPreviewThemeInfo=info;};
CThemeInfo* GetPreviewTheme(){return(m_pPreviewThemeInfo);};
void SetPreviewBackground(CBackground *back){m_pPreviewBackground=back;};
CBackground* GetPreviewBackground(){return(m_pPreviewBackground);};
// Other functions
int GetVMUMenuPos(){return(m_iVMUMenuPos);};
void SetVMUMenuPos(int pos){m_iVMUMenuPos=pos;};
void SetDraw(CDraw *draw){m_pDraw=draw;};
CDraw* GetDraw(){return(m_pDraw);};
// Persist/restore options and high scores to the given VMU device.
bool Save(maple_device_t *dev);
bool Load(maple_device_t *dev);
void SetVMU(CVMU *vmu);
CVMU* GetVMU(){return(m_pVMU);};
void Ready();
int GetActivePlayers();
maple_device_t* GetActiveVMU(){return(m_pActiveVMU);};
void SetActiveVMU(maple_device_t *dev){m_pActiveVMU=dev;};
bool AddPlayer(CPlayer *player);
// NOTE(review): public scratch coordinates with no accessor; purpose unclear
// from this header — confirm against the drawing code before renaming.
float XX,YY;
CGame();
~CGame();
// Game status state machine values
static const int STATUS_STOP=80;
static const int STATUS_RUN=81;
static const int STATUS_PAUSE=82;
static const int STATUS_GAMEOVER=83;
static const int STATUS_MAIN_MENU=84;
static const int STATUS_OPTION_MENU=85;
static const int STATUS_EXIT=86;
static const int STATUS_MODE_SELECT=89;
static const int STATUS_HIGH_SCORE=90;
static const int STATUS_CREDITS=91;
static const int STATUS_VMU_LOAD_MENU=92;
static const int STATUS_VMU_SAVE_MENU=93;
static const int STATUS_TVMODE_SELECT=94;
static const int STATUS_INIT=95;
static const int STATUS_THEME_BROWSER=96;
static const int STATUS_THEME_BROWSER_INFO=97;
static const int STATUS_THEME_BROWSER_PREVIEW=98;
static const int STATUS_TVMODE_TEST=99;
// Player-count modes
static const int GAME_MODE_1PLAYER=110;
static const int GAME_MODE_2PLAYER=111;
static const int GAME_MODE_NONE=112;
// Rule variants
static const int GAME_TYPE_NORMAL=130;
static const int GAME_TYPE_FLIP=131;
static const int GAME_TYPE_FAST_COMBAT=132;
static const int GAME_TYPE_COMBAT=133;
void SetGameMode(int mode){m_iGameMode=mode;};
int GetGameMode(){return(m_iGameMode);};
// Sound -- Start
void PlaySound(int id,int vol=-1,int pan=128);
void SetSound(bool sound){m_bSound=sound;};
bool GetSound(){return(m_bSound);};
void SetSoundVolume(int volume){m_iSoundVolume=volume;};
int GetSoundVolume(){return(m_iSoundVolume);};
// Sound -- End
int GetStatus(){return(m_iStatus);};
void SetStatus(int status){m_iStatus=status;};
void SetPlayers(int players){m_iPlayers=players;};
// NOTE(review): no bounds check — callers must pass 0 or 1.
CPlayer* GetPlayer(int player){return(m_pPlayers[player]);};
int GetPlayers(){return(m_iPlayers);};
void SetAutoSave(bool save){m_bAutoSave=save;};
bool GetAutoSave(){return(m_bAutoSave);};
void SetHighScore();
CHighScore* GetHighScore(){return(m_pHighScore);};
int GetGameType(){return(m_iGameType);};
void SetGameType(int gameType){m_iGameType=gameType;};
};
#endif // __8EB22FACEE81CD2F4E35__GAME__HPP__
|
#!/bin/bash
function findAppLabel()
{
    # Derive "<application_name>-<instance_index>" from the Cloud Foundry
    # VCAP_APPLICATION JSON env var. The sed pipeline splits the JSON at
    # ,"-boundaries, strips quotes/commas, then the wanted field is grepped
    # out and its value taken after the colon.
    # Fix: quote ${VCAP_APPLICATION} — unquoted expansion is subject to word
    # splitting and pathname expansion, which can mangle the JSON.
    appName=`echo "${VCAP_APPLICATION}" | sed -e 's/,\"/&\n\"/g;s/\"//g;s/,//g'| grep application_name | cut -d: -f2`
    appInst=`echo "${VCAP_APPLICATION}" | sed -e 's/,\"/&\n\"/g;s/\"//g;s/,//g'| grep instance_index| cut -d: -f2`
    echo "${appName}-${appInst}"
}
# Legacy variant of findAppLabel: reads the captured environment from
# /home/vcap/logs/env.log instead of $VCAP_APPLICATION. Kept for reference;
# not called by the current code visible here.
function oldFindAppLabel()
{
# Temporarily split the comma-separated env dump into fields.
old_IFS=$IFS
IFS=","
for envAppContent in `cat /home/vcap/logs/env.log`
do
#if [[ "$envAppContent" == *instance_index* ]]; then
# appInst=`echo $envAppContent | sed -e 's/\"//g;s/instance_index://g;s/^[ \t]*//;s/[ \t]*$//'`
#elif [[ "$envAppContent" == *application_name* ]]; then
# appName=`echo $envAppContent | sed -e 's/\"//g;s/application_name://g;s/^[ \t]*//;s/[ \t]*$//'`
#fi
# Strip quotes, the key prefix, and surrounding whitespace from the value.
case "$envAppContent" in
*instance_index* )
appInst=`echo $envAppContent | sed -e 's/\"//g;s/instance_index://g;s/^[ \t]*//;s/[ \t]*$//'`;;
*application_name* )
appName=`echo $envAppContent | sed -e 's/\"//g;s/application_name://g;s/^[ \t]*//;s/[ \t]*$//'`;;
esac
done
IFS=$old_IFS
echo ${appName}-${appInst}
}
# Guess the runtime to monitor by inspecting the five largest-RSS processes:
# prints "JAVA" if a java binary is among them, otherwise "RUBY" (default).
# NOTE(review): if both appear, the later entry in the ps output wins.
function findTargetType()
{
old_IFS=$IFS
IFS=$'\n'
appType="RUBY"
for process in `ps aux --sort rss | tail -5`
do
#if [[ "$process" == *\/java* ]]; then
# appType="JAVA"
#elif [[ "$process" == *\/ruby* ]]; then
# appType="RUBY"
#fi
case "$process" in
*\/java* )
appType="JAVA";;
*\/ruby* )
appType="RUBY";;
esac
done
IFS=$old_IFS
echo ${appType}
}
function touchAndSaveTimestamp()
{
    # Create/refresh the monitored trigger file and remember its current
    # last-access timestamp (epoch seconds) for later comparison.
    # Fix: the original wrapped `touch` in backticks, which captured its
    # (empty) output and then tried to execute it as a command — useless
    # command substitution removed. The path is also quoted now.
    touch "$DUMP_MONITOR_TARGET"
    lastSavedAccessTimestamp=`stat -c %X "$DUMP_MONITOR_TARGET"`
}
# Seconds between monitor polls.
SLEEP_INTERVAL=30
# Destination for collected dumps; created up front, errors silenced.
DUMP_FOLDER="/home/vcap/dumps"
mkdir -p $DUMP_FOLDER 2>/dev/null
|
<filename>web-utils/src/main/java/elasta/webutils/query/string/JsonObjectToQueryStringConverter.java<gh_stars>1-10
package elasta.webutils.query.string;
import io.vertx.core.json.JsonObject;
/**
* Created by Jango on 2016-11-20.
*/
/**
 * Marker interface specializing {@code ObjectToQueryStringConverter} for
 * Vert.x {@link JsonObject} payloads; adds no members of its own.
 */
public interface JsonObjectToQueryStringConverter extends ObjectToQueryStringConverter<JsonObject> {
}
|
def remove_redundancy(list):
    """Return a new list with duplicates removed, preserving first-seen order.

    The parameter name shadows the built-in ``list``; it is kept unchanged so
    existing keyword callers remain compatible.

    Uses an O(1) set membership test (was an O(n) list scan per element, i.e.
    O(n^2) overall), falling back to the original list scan for unhashable
    elements so behavior is unchanged for inputs like lists of lists.
    """
    seen = set()
    result = []
    for ele in list:
        try:
            duplicate = ele in seen
            if not duplicate:
                seen.add(ele)
        except TypeError:
            # Unhashable element: preserve original semantics via list scan.
            duplicate = ele in result
        if not duplicate:
            result.append(ele)
    return result
# Demo call; the deduplicated result is not captured, matching the original.
sample_values = [1, 2, 2, 3, 3, 3, 4, 4, 4, 4]
remove_redundancy(sample_values)
<reponame>open-crypto-portfolio/neo4j-ogm
/*
* Copyright (c) 2002-2019 "Neo4j,"
* Neo4j Sweden AB [http://neo4j.com]
*
* This file is part of Neo4j.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.neo4j.ogm.session.delegates;
import static org.assertj.core.api.Assertions.*;
import java.io.IOException;
import java.util.Arrays;
import java.util.Date;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.neo4j.ogm.cypher.ComparisonOperator;
import org.neo4j.ogm.cypher.Filter;
import org.neo4j.ogm.domain.music.Album;
import org.neo4j.ogm.session.Neo4jSession;
import org.neo4j.ogm.session.Session;
import org.neo4j.ogm.session.SessionFactory;
import org.neo4j.ogm.testutil.MultiDriverTestClass;
import org.neo4j.ogm.typeconversion.DateLongConverter;
import org.neo4j.ogm.typeconversion.DateStringConverter;
/**
* Integration tests for the session delegate.
*
* @author <NAME>
*/
public class SessionDelegateIntegrationTest extends MultiDriverTestClass {
private static SessionFactory sessionFactory;
private Session session;
@BeforeClass
public static void createSessionFactory() {
// One factory for the whole class; scans the music test domain.
sessionFactory = new SessionFactory(driver, "org.neo4j.ogm.domain.music");
}
@Before
public void init() throws IOException {
session = sessionFactory.openSession();
}
@Test // DATAGRAPH-933
public void shouldPickupCorrectFieldInfo() {
// Verifies resolvePropertyAnnotations attaches the right converter to each
// filter: fields named directly, via @Property, or with explicit converters.
final Date filterValue = new Date();
final Filter recordedAtFilter = new Filter("recordedAt", ComparisonOperator.GREATER_THAN, filterValue);
final Filter releasedFilter = new Filter("released", ComparisonOperator.GREATER_THAN, filterValue);
final Filter releasedAtFilter = new Filter("releasedAt", ComparisonOperator.GREATER_THAN, filterValue);
final Filter enteredChartAtFilter = new Filter("enteredChartAt", ComparisonOperator.GREATER_THAN, filterValue);
final Filter leftChartFilter = new Filter("leftChart", ComparisonOperator.GREATER_THAN, filterValue);
final Filter leftChartAtFilter = new Filter("leftChartAt", ComparisonOperator.GREATER_THAN, filterValue);
final SessionDelegate sessionDelegate = new LoadByTypeDelegate((Neo4jSession) session);
sessionDelegate.resolvePropertyAnnotations(Album.class, Arrays.asList(
recordedAtFilter,
releasedFilter,
releasedAtFilter,
enteredChartAtFilter,
leftChartFilter,
leftChartAtFilter
));
assertThat(recordedAtFilter.getPropertyConverter())
.as("Property converter %s should be used for Date fields without @Property-annotation",
DateStringConverter.class)
.isInstanceOf(DateStringConverter.class);
assertThat(releasedFilter.getPropertyConverter())
.as("Property converter %s should be used for Date fields with @Property-annotation referred by field name",
DateStringConverter.class)
.isInstanceOf(DateStringConverter.class);
assertThat(releasedAtFilter.getPropertyConverter())
.as("Property converter %s should be used for Date fields with @Property-annotation referred by property name",
DateStringConverter.class)
.isInstanceOf(DateStringConverter.class);
assertThat(enteredChartAtFilter.getPropertyConverter())
.as("Specified provider should be used")
.isInstanceOf(DateLongConverter.class);
assertThat(leftChartFilter.getPropertyConverter())
.as("Specified provider should be used")
.isInstanceOf(DateLongConverter.class);
assertThat(leftChartAtFilter.getPropertyConverter())
.as("Specified provider should be used")
.isInstanceOf(DateLongConverter.class);
}
}
|
#!/usr/bin/env bash
# Rotate an AD user's password back to its current value by cycling through
# two throwaway intermediate passwords (works around password-history rules
# that reject an immediate same-password reset).
DOMAIN_NAME=$1
username=$2
echo "Welcome to Reset Password for an Active Directory User"
echo
if [ -z "${DOMAIN_NAME}" ]; then
    read -p 'Enter Domain: ' DOMAIN_NAME
fi
if [ -z "${username}" ]; then
    read -p 'Username: ' username
fi
# -s keeps the password off the terminal. Typo fixed: "currennt" -> "current".
read -sp "Enter current password of user $username@$DOMAIN_NAME: " password
if [[ -z ${DOMAIN_NAME} || -z ${username} || -z ${password} ]]; then
    echo
    echo
    echo "ERROR: Invalid argument specified"
    exit 1
fi
pass1='Secret*123'
pass2='Secret%321'
echo
# smbpasswd -s reads old/new/new from stdin; quote args against splitting.
echo -e "$password\n$pass1\n$pass1" | (smbpasswd -s -U "$username" -r "$DOMAIN_NAME")
echo -e "$pass1\n$pass2\n$pass2" | (smbpasswd -s -U "$username" -r "$DOMAIN_NAME")
echo -e "$pass2\n$password\n$password" | (smbpasswd -s -U "$username" -r "$DOMAIN_NAME")
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.stackExchange = void 0;
var stackExchange = {
"viewBox": "0 0 1280 1792",
"children": [{
"name": "path",
"attribs": {
"d": "M1259 1253v66q0 85-57.5 144.5t-138.5 59.5h-57l-260 269v-269h-529q-81 0-138.5-59.5t-57.5-144.5v-66h1238zM1259 927v255h-1238v-255h1238zM1259 599v255h-1238v-255h1238zM1259 459v67h-1238v-67q0-84 57.5-143.5t138.5-59.5h846q81 0 138.5 59.5t57.5 143.5z"
}
}]
};
exports.stackExchange = stackExchange; |
<reponame>lananh265/social-network
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.alignLeft = void 0;
var alignLeft = {
"viewBox": "0 0 1792 1792",
"children": [{
"name": "path",
"attribs": {
"d": "M1792 1344v128q0 26-19 45t-45 19h-1664q-26 0-45-19t-19-45v-128q0-26 19-45t45-19h1664q26 0 45 19t19 45zM1408 960v128q0 26-19 45t-45 19h-1280q-26 0-45-19t-19-45v-128q0-26 19-45t45-19h1280q26 0 45 19t19 45zM1664 576v128q0 26-19 45t-45 19h-1536q-26 0-45-19t-19-45v-128q0-26 19-45t45-19h1536q26 0 45 19t19 45zM1280 192v128q0 26-19 45t-45 19h-1152q-26 0-45-19t-19-45v-128q0-26 19-45t45-19h1152q26 0 45 19t19 45z"
}
}]
};
exports.alignLeft = alignLeft; |
<reponame>fsanchezvilela/cinemapp<filename>src/movies/view/state/store.ts<gh_stars>0
import { configureStore } from '@reduxjs/toolkit';
// Or from '@reduxjs/toolkit/query/react'
import { setupListeners } from '@reduxjs/toolkit/query';
import { MoviesRepository } from '../../data/repositories/MoviesRepository';
import moviesReducer from './reducer/reducer';
// ...
// Redux store for the movies feature: the movies reducer plus the RTK Query
// middleware from MoviesRepository (needed for caching/invalidation).
export const store = configureStore({
reducer: moviesReducer,
middleware: (getDefaultMiddleware) =>
getDefaultMiddleware().concat(MoviesRepository.middleware),
});
// optional, but required for refetchOnFocus/refetchOnReconnect behaviors
// see `setupListeners` docs - takes an optional callback as the 2nd arg for customization
setupListeners(store.dispatch);
// Infer the `RootState` and `AppDispatch` types from the store itself
export type RootState = ReturnType<typeof store.getState>;
// Inferred type: {posts: PostsState, comments: CommentsState, users: UsersState}
export type MovieDispatch = typeof store.dispatch;
|
/* eslint-disable
strict,
lines-around-directive,
import/order,
import/no-extraneous-dependencies,
no-restricted-syntax
*/
'use strict';
const helpers = require('./helpers.js');
const glob = require('glob');
const path = require('path');
const webpack = require('webpack');
const webpackMerge = require('webpack-merge');
const ExtractTextPlugin = require('extract-text-webpack-plugin');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const HtmlWebpackHarddiskPlugin = require('html-webpack-harddisk-plugin');
const CleanWebpackPlugin = require('clean-webpack-plugin');
const ImageminPlugin = require('imagemin-webpack-plugin').default;
const CopyWebpackPlugin = require('copy-webpack-plugin');
// const WriteFilePlugin = require('write-file-webpack-plugin');
// Webpack config factory. `env` selects the build flavor ('production',
// 'dev-server', or default development), `outputDir` the emit directory, and
// `srcJS` which src/js/<variant>/application.js entry to build.
module.exports = function({
env = 'development',
outputDir = 'build',
srcJS = 'app',
} = {}) {
const OUTPUT_DIR = outputDir;
const JS_VARIANT = srcJS;
const PROD = !!(env === 'production');
const DEV_SERVER = !!(env === 'dev-server');
// One HtmlWebpackPlugin page per top-level pug template.
const PAGES = glob.sync(helpers.rootPath('./src/pug/*.pug'), {
nodir: true,
nonull: false,
}).map(filename => path.parse(filename).name);
// Pre-built loader query strings: output locations differ for assets
// referenced from CSS (relative ../) vs from pug templates (assets/...).
const QUERY = {
fonts: ['outputPath=fonts/', 'publicPath=../fonts/', 'name=[name].[ext]', 'limit=10000'].join('&'),
images: ['outputPath=img/', 'publicPath=../img/', 'name=[name].[ext]'].join('&'),
template_images: ['outputPath=img/', 'publicPath=assets/img/', 'name=[name].[ext]'].join('&'),
files: ['outputPath=files/', 'publicPath=../files/', 'name=[name].[ext]'].join('&'),
template_files: ['outputPath=files/', 'publicPath=assets/files/', 'name=[name].[ext]'].join('&'),
};
// CSS minification only in production builds.
const CSSNANO_OPTIONS = PROD ? {
// cssnano options
autoprefixer: false,
zindex: false,
} : false;
let defaults = {
resolve: {
modules: [
'node_modules',
helpers.rootPath('./src/js/'),
helpers.rootPath('./src/vendor/'),
],
alias: {
Vendors: helpers.rootPath('./src/vendor/'),
Scripts: helpers.rootPath('./src/js/'),
Styles: helpers.rootPath('./src/styles/'),
Templates: helpers.rootPath('./src/pug/'),
Images: helpers.rootPath('./src/img/'),
Fonts: helpers.rootPath('./src/fonts/'),
},
extensions: ['.js', '.jsx'],
},
};
let config = webpackMerge(defaults, {
// devtool: 'cheap-module-eval-source-map',
devtool: 'source-map',
context: helpers.rootPath(),
entry: {
vendor: [
'dom4',
'delegated-events',
// 'jquery',
'./src/js/vendor/vendor.js',
],
application: [
'./src/styles/vendors.css',
'./src/styles/vendors.scss',
'./src/styles/application.scss',
`./src/js/${JS_VARIANT}/application.js`,
],
},
output: {
path: helpers.rootPath(OUTPUT_DIR, 'assets/'),
publicPath: 'assets/',
filename: 'js/[name].js', // .[hash:4]
chunkFilename: 'js/[id].[name].js', // .[chunkhash:4]
library: 'Markup',
},
module: {
rules: [{
enforce: 'pre',
test: /\.(js|jsx)$/,
include: helpers.rootPath('./src/js/'),
use: 'eslint-loader',
}, {
test: /\.(js|jsx)$/,
exclude: RegExp(`node_modules|src(\\|/)vendor|${OUTPUT_DIR}`),
loader: 'babel-loader',
options: {
cacheDirectory: true,
},
}, {
test: /\.(js|jsx)$/,
include: helpers.rootPath('./src/vendor/'),
use: 'script-loader',
}, {
test: /\.(sass|scss)$/,
use: ExtractTextPlugin.extract({
fallback: 'style-loader',
use: [{
loader: 'css-loader',
options: {
minimize: CSSNANO_OPTIONS,
sourceMap: true,
importLoaders: 3,
},
}, {
loader: 'postcss-loader',
options: {
sourceMap: true,
},
}, {
loader: 'sass-loader',
options: {
precision: 8,
sourceMap: true,
// includePaths: [helpers.rootPath('./src/styles/sass/')],
},
}],
}),
}, {
test: /\.css$/,
use: ExtractTextPlugin.extract({
fallback: 'style-loader',
use: [{
loader: 'css-loader',
options: {
minimize: CSSNANO_OPTIONS,
sourceMap: true,
importLoaders: 1,
},
}, {
loader: 'postcss-loader',
options: {
sourceMap: true,
},
}],
}),
}, {
test: /\.(pug)$/,
loader: 'pug-loader',
options: {
root: helpers.rootPath('./src/pug/'),
pretty: false,
},
},
{ test: /\.woff(\?.*)?$/, use: `url-loader?mimetype=application/font-woff&${QUERY.fonts}` },
{ test: /\.woff2(\?.*)?$/, use: `url-loader?mimetype=application/font-woff2&${QUERY.fonts}` },
{ test: /\.otf(\?.*)?$/, use: `url-loader?mimetype=font/opentype&${QUERY.fonts}` },
{ test: /\.ttf(\?.*)?$/, use: `url-loader?mimetype=application/octet-stream&${QUERY.fonts}` },
{ test: /\.eot(\?.*)?$/, use: `file-loader?${QUERY.fonts}` },
{
test: /\.svg(\?.*)?$/,
include: /fonts?(\\|\/)/,
issuer: { exclude: helpers.rootPath('./src/pug/') },
use: `url-loader?mimetype=image/svg+xml&${QUERY.fonts}`,
}, {
test: /\.svg(\?.*)?$/,
exclude: /fonts?(\\|\/)/,
issuer: { exclude: helpers.rootPath('./src/pug/') },
use: `file-loader?${QUERY.images}`,
}, {
test: /\.(png|jpe?g|gif)$/,
issuer: { exclude: helpers.rootPath('./src/pug/') },
use: `file-loader?${QUERY.images}`,
}, {
test: /\.(png|jpe?g|gif|svg)$/,
issuer: { include: helpers.rootPath('./src/pug/') },
use: `file-loader?${QUERY.template_images}`,
}, {
test: /\.(xlsx?|docx?|pdf|zip|rar|mpe?g4|webm)$/i,
issuer: { exclude: helpers.rootPath('./src/pug/') },
use: `file-loader?${QUERY.files}`,
}, {
test: /\.(xlsx?|docx?|pdf|zip|rar|mpe?g4|webm)$/i,
issuer: { include: helpers.rootPath('./src/pug/') },
use: `file-loader?${QUERY.template_files}`,
}],
},
plugins: [
new webpack.NamedModulesPlugin(),
new webpack.DefinePlugin({
PROD: JSON.stringify(PROD),
}),
new webpack.ProvidePlugin({
'$': 'jquery',
'jQuery': 'jquery',
'window.jQuery': 'jquery',
}),
new webpack.optimize.CommonsChunkPlugin({
name: 'vendor',
minChunks(module) {
return module.context && module.context.indexOf('node_modules') !== -1;
},
}),
new webpack.optimize.CommonsChunkPlugin({
names: ['common'],
}),
new ExtractTextPlugin({ filename: 'css/[name].css', allChunks: true }), // .[contenthash:4]
new CopyWebpackPlugin([
{ from: 'src/game', to: 'game' },
], {
cache: true,
}),
],
watchOptions: {
ignored: RegExp(`node_modules|src(\\|/)vendor|${OUTPUT_DIR}`),
},
devServer: {
// historyApiFallback: true,
contentBase: helpers.rootPath(OUTPUT_DIR),
publicPath: 'http://localhost:9000/assets/',
compress: false,
host: '0.0.0.0',
port: 9000,
disableHostCheck: true,
stats: 'minimal',
overlay: {
warnings: false,
errors: true,
},
headers: { 'Access-Control-Allow-Origin': '*' },
staticOptions: {
index: '*.*',
},
},
});
// Build static pages
for (let tpl of PAGES) {
config.plugins.push(
new HtmlWebpackPlugin({
alwaysWriteToDisk: true,
inject: false,
filename: helpers.rootPath(OUTPUT_DIR, `${tpl}.html`),
template: helpers.rootPath(`./src/pug/${tpl}.pug`),
minify: {
collapseWhitespace: true,
// conservativeCollapse: true,
collapseInlineTagWhitespace: false,
},
env: {
prod: PROD,
pages: PAGES,
},
}),
);
}
config.plugins.push(new HtmlWebpackHarddiskPlugin());
// if (DEV_SERVER) {
// config = webpackMerge(config, {
// plugins: [
// new WriteFilePlugin({
// test: /game(\\|\/)/,
// useHashIndex: true,
// }),
// ],
// });
// }
// Clean the output dir for real builds only (dry run under dev-server).
if (!DEV_SERVER) {
config = webpackMerge(config, {
plugins: [
new CleanWebpackPlugin([OUTPUT_DIR], {
root: helpers.rootPath(),
// exclude: ['shared.js'],
verbose: true,
dry: DEV_SERVER,
}),
],
});
}
// Configuration alterations for production
if (PROD) {
config = webpackMerge(config, {
devtool: 'source-map',
plugins: [
new ImageminPlugin({
test: /\.(jpe?g|png|gif|svg)$/i,
}),
new webpack.LoaderOptionsPlugin({
minimize: true,
debug: false,
}),
new webpack.optimize.UglifyJsPlugin({
compress: {
drop_console: true,
unused: true,
dead_code: true,
warnings: false,
},
}),
],
});
}
return config;
};
|
#!/bin/sh
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
set -e
# Code-signs the .app bundle, then extracts each embedded signature (plus the
# CodeResources files) into a tarball of detached signatures that can later be
# re-applied to an unsigned build for deterministic releases.
ROOTDIR=dist
BUNDLE="${ROOTDIR}/Steffy-Qt.app"
CODESIGN=codesign
TEMPDIR=sign.temp
TEMPLIST=${TEMPDIR}/signatures.txt
OUT=signature.tar.gz
OUTROOT=osx
if [ ! -n "$1" ]; then
echo "usage: $0 <codesign args>"
echo "example: $0 -s MyIdentity"
exit 1
fi
rm -rf ${TEMPDIR} ${TEMPLIST}
mkdir -p ${TEMPDIR}
# --file-list records every file codesign touched, for the extraction passes.
${CODESIGN} -f --file-list ${TEMPLIST} "$@" "${BUNDLE}"
# Pass 1: Mach-O binaries — carve the embedded signature blob out of each
# file. pagestuff reports the signature's size/offset in the last two lines;
# dd copies exactly that byte range into a .sign file mirroring the bundle path.
grep -v CodeResources < "${TEMPLIST}" | while read i; do
TARGETFILE="${BUNDLE}/`echo "${i}" | sed "s|.*${BUNDLE}/||"`"
SIZE=`pagestuff "$i" -p | tail -2 | grep size | sed 's/[^0-9]*//g'`
OFFSET=`pagestuff "$i" -p | tail -2 | grep offset | sed 's/[^0-9]*//g'`
SIGNFILE="${TEMPDIR}/${OUTROOT}/${TARGETFILE}.sign"
DIRNAME="`dirname "${SIGNFILE}"`"
mkdir -p "${DIRNAME}"
echo "Adding detached signature for: ${TARGETFILE}. Size: ${SIZE}. Offset: ${OFFSET}"
dd if="$i" of="${SIGNFILE}" bs=1 skip=${OFFSET} count=${SIZE} 2>/dev/null
done
# Pass 2: CodeResources plists are copied whole (they are not embedded blobs).
grep CodeResources < "${TEMPLIST}" | while read i; do
TARGETFILE="${BUNDLE}/`echo "${i}" | sed "s|.*${BUNDLE}/||"`"
RESOURCE="${TEMPDIR}/${OUTROOT}/${TARGETFILE}"
DIRNAME="`dirname "${RESOURCE}"`"
mkdir -p "${DIRNAME}"
echo "Adding resource for: "${TARGETFILE}""
cp "${i}" "${RESOURCE}"
done
rm ${TEMPLIST}
tar -C "${TEMPDIR}" -czf "${OUT}" .
rm -rf "${TEMPDIR}"
echo "Created ${OUT}"
|
<gh_stars>0
package com.honyum.elevatorMan.activity.worker;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import com.honyum.elevatorMan.R;
import com.honyum.elevatorMan.adapter.FixPaymentAdapter;
import com.honyum.elevatorMan.base.BaseActivityWraper;
import com.honyum.elevatorMan.base.ListItemCallback;
import com.honyum.elevatorMan.data.FixComponent;
import com.honyum.elevatorMan.data.FixInfo;
import com.honyum.elevatorMan.net.FixNextTimeRequest;
import com.honyum.elevatorMan.net.FixPaymentRequest;
import com.honyum.elevatorMan.net.base.NetConstant;
import com.honyum.elevatorMan.net.base.NetTask;
import com.honyum.elevatorMan.net.base.NewRequestHead;
import com.honyum.elevatorMan.net.base.RequestBean;
import com.honyum.elevatorMan.net.base.Response;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import static com.honyum.elevatorMan.net.base.NetConstant.RSP_CODE_SUC_0;
/**
* Created by Star on 2017/6/13. 订单页面
*/
/**
 * Payment-submission screen for a repair order: lists billable components,
 * keeps a running total, and posts the price details to the server.
 * Created by Star on 2017/6/13.
 */
public class FixPaymentActivity extends BaseActivityWraper implements ListItemCallback<FixComponent> {
    @BindView(R.id.rlv_pay_list)
    ListView rlvPayList;
    @BindView(R.id.tv_componentname)
    TextView tvComponentname;
    @BindView(R.id.tv_moneycount)
    TextView tvMoneycount;
    @BindView(R.id.iv_remove_item)
    ImageView ivRemoveItem;
    @BindView(R.id.tv_fix_payment)
    TextView tvFixPayment;
    @BindView(R.id.tv_add_item)
    TextView tvAddItem;

    private FixPaymentAdapter mFixPaymentAdapter;
    // The repair order this payment belongs to (read from the launching intent).
    private FixInfo mFixInfo;
    // Billable line items currently shown in the list.
    private List<FixComponent> datas = new ArrayList<FixComponent>();

    @Override
    public String getTitleString() {
        return getString(R.string.paymentsubmit);
    }

    @Override
    protected void initView() {
        ivRemoveItem.setVisibility(View.GONE);
        mFixInfo = getIntent("Info");
        mFixPaymentAdapter = new FixPaymentAdapter(this, datas);
        rlvPayList.setAdapter(mFixPaymentAdapter);
        tvComponentname.setTextColor(getResources().getColor(R.color.color_list_indexred));
        tvComponentname.setText(R.string.count);
        tvMoneycount.setText("¥0.0");
    }

    /**
     * Sums the prices of all listed components.
     *
     * @return the total amount for the current line items
     */
    private double countMoney() {
        double result = 0.0;
        for (int i = 0; i < datas.size(); i++) {
            result += datas.get(i).getPrice();
        }
        return result;
    }

    @Override
    protected int getLayoutID() {
        return R.layout.activity_pay_list;
    }

    @OnClick({R.id.tv_fix_payment, R.id.tv_add_item})
    public void onViewClicked(View view) {
        switch (view.getId()) {
            case R.id.tv_fix_payment:
                requestAddPriceDetails();
                break;
            case R.id.tv_add_item:
                showAddDialog();
                break;
        }
    }

    /**
     * Shows a dialog that lets the user add one billable item (name + fee).
     * A valid fee appends the item and refreshes the total; an invalid fee
     * shows a toast instead.
     */
    public void showAddDialog() {
        AlertDialog.Builder builder = new AlertDialog.Builder(FixPaymentActivity.this);
        // Inflate a custom layout to use as the dialog content.
        View view = LayoutInflater.from(FixPaymentActivity.this).inflate(R.layout.dia_addpayment, null);
        builder.setView(view);
        final EditText fee = (EditText) view.findViewById(R.id.et_fee);
        final EditText name = (EditText) view.findViewById(R.id.et_name);
        builder.setPositiveButton("确定", new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                if (isNumber(fee.getText().toString().trim())) {
                    datas.add(new FixComponent().setName(name.getText().toString().trim()).
                            setPrice(Double.valueOf(fee.getText().toString().trim())).
                            setRepairOrderId(mFixInfo.getId()));
                    // Fixed: prefix the currency symbol so the label stays
                    // consistent with initView() and performItemCallback().
                    tvMoneycount.setText("¥" + countMoney());
                    mFixPaymentAdapter.notifyDataSetChanged();
                } else {
                    showToast("请输入合法金额!");
                }
            }
        });
        builder.setNegativeButton("取消", new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                dialog.dismiss();
            }
        });
        builder.show();
    }

    /**
     * Submits the collected price details to the server; on success shows a
     * toast and closes this screen.
     */
    private void requestAddPriceDetails() {
        NetTask task = new NetTask(getConfig().getServer() + NetConstant.URL_FIX_PAY_ADD,
                getRequestBean(getConfig().getUserId(), getConfig().getToken())) {
            @Override
            protected void onResponse(NetTask task, String result) {
                Response response = Response.getResponse(result);
                if (response.getHead() != null && response.getHead().getRspCode().equals(RSP_CODE_SUC_0)) {
                    showAppToast(getString(R.string.sucess));
                    finish();
                }
            }
        };
        addTask(task);
    }

    /**
     * Returns whether the string is a number (integer or decimal).
     */
    public boolean isNumber(String value) {
        return isInteger(value) || isDouble(value);
    }

    /**
     * Returns whether the string parses as an integer.
     */
    public boolean isInteger(String value) {
        try {
            Integer.parseInt(value);
            return true;
        } catch (NumberFormatException e) {
            return false;
        }
    }

    /**
     * Returns whether the string is a decimal number. Deliberately requires a
     * decimal point so that plain integers are classified by isInteger() only.
     */
    public boolean isDouble(String value) {
        try {
            Double.parseDouble(value);
            if (value.contains("."))
                return true;
            return false;
        } catch (NumberFormatException e) {
            return false;
        }
    }

    /**
     * Builds the payment request carrying auth info and the line items.
     */
    private RequestBean getRequestBean(String userId, String token) {
        FixPaymentRequest request = new FixPaymentRequest();
        request.setHead(new NewRequestHead().setaccessToken(token).setuserId(userId));
        request.setBody(datas);
        return request;
    }

    /**
     * List-item callback: removes the tapped component and refreshes the total.
     */
    @Override
    public void performItemCallback(FixComponent data) {
        datas.remove(data);
        tvMoneycount.setText("¥" + countMoney());
        mFixPaymentAdapter.notifyDataSetChanged();
    }
}
|
<reponame>3rdvision/ripe-sdk<filename>src/js/api/locale.js
// UMD-style guard: load the base module through CommonJS only when running
// under Node, Webpack or React Native (i.e. `require` exists and we are not
// in a plain browser window).
if (
    typeof require !== "undefined" &&
    (typeof window === "undefined" ||
        // eslint-disable-next-line camelcase
        typeof __webpack_require__ !== "undefined" ||
        (typeof navigator !== "undefined" && navigator.product === "ReactNative"))
) {
    // eslint-disable-next-line no-redeclare,no-var
    var base = require("../base");
    // eslint-disable-next-line no-redeclare,no-var
    var ripe = base.ripe;
}
/**
 * Localizes a value to the provided locale.
 *
 * Thin alias that delegates to `localeMultiple` with a single value
 * (the trailing underscore presumably avoids clashing with an existing
 * `locale` member — confirm against the Ripe prototype).
 *
 * @param {String} value The value to be localized.
 * @param {String} locale The locale to localize the value to.
 * @param {Object} options An object of options to configure the request.
 * @param {Function} callback Function with the result of the request.
 * @returns {XMLHttpRequest} The XMLHttpRequest instance of the API request.
 */
ripe.Ripe.prototype.locale_ = function(value, locale, options, callback) {
    return this.localeMultiple(value, locale, options, callback);
};
/**
 * Promise-based variant of `locale_`: resolves with the localized result
 * or rejects with a `ripe.RemoteError` on a failed request.
 */
ripe.Ripe.prototype.localeP = function(value, locale, options) {
    const self = this;
    return new Promise(function(resolve, reject) {
        self.locale_(value, locale, options, function(result, isValid, request) {
            if (isValid) {
                resolve(result);
            } else {
                reject(new ripe.RemoteError(request, null, result));
            }
        });
    });
};
/**
 * Localizes a list of values to the provided locale.
 *
 * A single string value is accepted and normalized into a one-element
 * list. The `options`/`callback` pair follows the usual optional-options
 * convention (options may be omitted or be the callback itself).
 *
 * @param {String} values The values to be localized.
 * @param {String} locale The locale to localize the value to.
 * @param {Object} options An object of options to configure the request.
 * @param {Function} callback Function with the result of the request.
 * @returns {XMLHttpRequest} The XMLHttpRequest instance of the API request.
 */
ripe.Ripe.prototype.localeMultiple = function(values, locale, options, callback) {
    if (typeof values === "string") {
        values = [values];
    }
    if (typeof options === "function") {
        callback = options;
        options = {};
    } else if (options === undefined) {
        options = {};
    }
    options = Object.assign(options, {
        url: `${this.url}locale`,
        method: "GET",
        params: {
            values: values,
            locale: locale
        }
    });
    options = this._build(options);
    return this._cacheURL(options.url, options, callback);
};
/**
 * Promise-based variant of `localeMultiple`: resolves with the localized
 * results or rejects with a `ripe.RemoteError` on a failed request.
 */
ripe.Ripe.prototype.localeMultipleP = function(values, locale, options) {
    const self = this;
    return new Promise(function(resolve, reject) {
        self.localeMultiple(values, locale, options, function(result, isValid, request) {
            if (isValid) {
                resolve(result);
            } else {
                reject(new ripe.RemoteError(request, null, result));
            }
        });
    });
};
/**
 * Retrieves a bundle of locales for the provided locale value.
 *
 * The `options`/`callback` pair follows the usual optional-options
 * convention (options may be omitted or be the callback itself).
 *
 * @param {String} locale The locale string to retrieve the bundle.
 * @param {String} context The inner context for the locale bundle.
 * @param {Object} options An object of options to configure the request.
 * @param {Function} callback Function with the result of the request.
 * @returns {XMLHttpRequest} The XMLHttpRequest instance of the API request.
 */
ripe.Ripe.prototype.localeBundle = function(locale, context, options, callback) {
    if (typeof options === "function") {
        callback = options;
        options = {};
    } else if (options === undefined) {
        options = {};
    }
    options = Object.assign(options, {
        url: `${this.url}locale/bundle`,
        method: "GET",
        params: {
            locale: locale,
            context: context
        }
    });
    options = this._build(options);
    return this._cacheURL(options.url, options, callback);
};
/**
 * Promise-based variant of `localeBundle`: resolves with the locale bundle
 * or rejects with a `ripe.RemoteError` on a failed request.
 */
ripe.Ripe.prototype.localeBundleP = function(locale, context, options) {
    const self = this;
    return new Promise(function(resolve, reject) {
        self.localeBundle(locale, context, options, function(result, isValid, request) {
            if (isValid) {
                resolve(result);
            } else {
                reject(new ripe.RemoteError(request, null, result));
            }
        });
    });
};
|
<filename>feeder/parsers/solar.py
import arrow, gzip, json, pygrib, subprocess
# Base model cycle hour as a zero-padded string.
# NOTE(review): BASE appears unused in this module — confirm before removing.
BASE = '00'
# CFS publishes forecast steps every 6 hours; origins/horizons snap to this.
MULTIPLE = 6
# Bounding box corners as [longitude, latitude]: south-west and north-east.
SW = [-48.66, 28.17]
NE = [37.45, 67.71]
def get_url(origin, horizon):
    """Build the NOMADS CFS flux-filter URL for one forecast file.

    ``origin`` is the model run time and ``horizon`` the forecast valid
    time; both only need an arrow-style ``format`` method.  The bounding
    box comes from the module-level SW/NE constants (longitudes shifted
    into the 0-360 range expected by the filter).
    """
    parts = [
        'http://nomads.ncep.noaa.gov/cgi-bin/filter_cfs_flx.pl?',
        'file=flxf%s.01.%s.grb2' % (horizon.format('YYYYMMDDHH'), origin.format('YYYYMMDDHH')),
        '&lev_surface=on&var_DLWRF=on&var_DSWRF=on&leftlon=%d&rightlon=%d&toplat=%d&bottomlat=%d' % (180 + SW[0], 180 + NE[0], NE[1], SW[1]),
        '&dir=%%2Fcfs.%s%%2F%s%%2F6hrly_grib_01' % (origin.format('YYYYMMDD'), origin.format('HH')),
    ]
    return ''.join(parts)
# Download one CFS GRIB2 file and extract the long/short-wave radiation
# fields.  Python 2 module (print statements) — do not run under Python 3.
# Returns a JSON-serializable dict with grids, horizon and origin times.
def fetch_forecast(origin, horizon):
    try:
        print 'Fetching forecast of %s made at %s' % (horizon, origin)
        subprocess.check_call(['wget', '-nv', get_url(origin, horizon), '-O', 'solar.grb2'], shell=False)
    except subprocess.CalledProcessError:
        # The requested run may not be published yet: retry with the
        # previous 6-hourly model run for the same valid time.
        origin = origin.replace(hours=-MULTIPLE)
        print 'Trying instead to fetch forecast of %s made at %s' % (horizon, origin)
        subprocess.check_call(['wget', '-nv', get_url(origin, horizon), '-O', 'solar.grb2'], shell=False)
    with pygrib.open('solar.grb2') as f:
        #print f.select(name='Downward long-wave radiation flux', level=0)
        # [-1] picks the last matching message in the file.
        grb_LW = f.select(name='Downward long-wave radiation flux', level=0)[-1]
        grb_SW = f.select(name='Downward short-wave radiation flux', level=0)[-1]
        return {
            'lonlats': [grb_LW['longitudes'].tolist(), grb_LW['latitudes'].tolist()],
            'DLWRF': grb_LW['values'].tolist(),
            'DSWRF': grb_SW['values'].tolist(),
            'horizon': horizon.isoformat(),
            'date': origin.isoformat()
        }
# Fetch the two forecasts bracketing "now" (the current 6-hourly step and the
# next one) and write them gzipped to data/solar.json.gz.
def fetch_solar():
    # Snap the current hour down to the most recent 6-hourly boundary.
    horizon = arrow.utcnow().floor('hour')
    while (int(horizon.format('HH')) % MULTIPLE) != 0:
        horizon = horizon.replace(hours=-1)
    origin = horizon
    obj_before = fetch_forecast(origin, horizon)
    obj_after = fetch_forecast(origin, horizon.replace(hours=+MULTIPLE))
    obj = {
        'forecasts': [obj_before, obj_after]
    }
    with gzip.open('data/solar.json.gz', 'w') as f:
        json.dump(obj, f)
    print 'Done'
if __name__ == '__main__':
    fetch_solar()
|
<reponame>addcolouragency/craft_storefront
import { _Pick } from './Pick';
import { Key } from '../Any/Key';
import { Keys } from '../Any/Keys';
import { RequiredFlat } from './Required';
import { Extends } from '../Any/Extends';
import { ComputeRaw } from '../Any/Compute';
import { OptionalFlat } from './Optional';
/**
 * @hidden
 * Makes every key of `O` required when `K` covers all of `O`'s keys that
 * intersect `K`; otherwise leaves `O` untouched.
 */
declare type RequiredIfKeys<O extends object, K extends Key> = Extends<keyof O & K, K> extends 1 ? RequiredFlat<O> : O;
/**
 * @hidden
 * Distributes over `K`: each picked key stays mandatory while the rest of
 * `O` becomes optional, producing one union member per key.
 */
declare type __AtLeast<O extends object, K extends Key> = K extends keyof O ? _Pick<O, K> & OptionalFlat<O> : O;
/**
 * @hidden
 * Applies `RequiredIfKeys` first, then flattens the result for display.
 */
declare type _AtLeast<O extends object, K extends Key> = ComputeRaw<__AtLeast<RequiredIfKeys<O, K>, K>>;
/**
 * Make that at least one of the keys `K` are required in `O` at a time.
 * @param O to make required
 * @param K (?=`keyof O`) to choose fields
 * @returns [[Object]] [[Union]]
 * @example
 * ```ts
 * // type T = AtLeast<{a?: number; b?: string}>
 * // => {a: number; b?: string} | {a?: number; b: string}
 * ```
 */
export declare type AtLeast<O extends object, K extends Key = Keys<O>> = O extends unknown ? _AtLeast<O, K> : never;
export {};
|
<gh_stars>1-10
package objects
import (
"github.com/stretchr/testify/assert"
"testing"
)
// Test_Item_MoveTo checks Item.MoveTo using plain testing primitives.
// NOTE(review): the expected position {3,4} is the sum of both calls, so
// MoveTo appears to translate by a delta rather than set an absolute
// position — confirm against the Item implementation.
func Test_Item_MoveTo(t *testing.T) {
	atom := NewItem("Atom")
	atom.MoveTo(Point{1, 1})
	atom.MoveTo(Point{2, 3})
	expected := Point{3, 4}
	if expected != atom.Pos() {
		t.Logf("Item not moved correct: expected=%v, is=%v", expected, atom.Pos())
		t.Fail()
	}
}
// Test_Item_MoveTo_WithAssert repeats the MoveTo scenario above using the
// testify assert API for comparison.
func Test_Item_MoveTo_WithAssert(t *testing.T) {
	atom := NewItem("Atom")
	atom.MoveTo(Point{1, 1})
	atom.MoveTo(Point{2, 3})
	a := assert.New(t)
	a.Equal(Point{3, 4}, atom.Pos())
}
// Test_Item_MoveInDirection checks that moving 5 steps along direction
// {1,2} scales the direction vector by the step count.
func Test_Item_MoveInDirection(t *testing.T) {
	atom := NewItem("Atom")
	atom.MoveInDirection(Point{1, 2}, 5)
	a := assert.New(t)
	a.Equal(Point{5, 10}, atom.Pos())
}
|
<reponame>garima-softuvo/Flask_practice<filename>Dummy_code/file__tst.py
import os
#Read files from the folder
# Read every file from the folder and load its raw bytes.
path = r"/home/softuvo/Garima/Flask Practice/Flask_practice/files"
files = os.listdir(path)
print(files)

for f in files:
    filename = os.path.join(path, f)
    print(filename)
    # Fixed: the original iterated over an undefined `path2` variable
    # (NameError) and tried to open the directory path instead of the
    # file path just built above.
    with open(filename, 'rb') as file:
        binaryData = file.read()
    # binaryData holds the file's raw contents; hook further processing
    # (e.g. a convertToBinaryData helper) in here.
|
<reponame>anthochristen/resteasy
package org.jboss.resteasy.test.validation;
import java.util.Iterator;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.container.test.api.OperateOnDeployment;
import org.jboss.arquillian.container.test.api.RunAsClient;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.resteasy.api.validation.ResteasyConstraintViolation;
import org.jboss.resteasy.api.validation.ResteasyViolationException;
import org.jboss.resteasy.api.validation.Validation;
import org.jboss.resteasy.plugins.validation.ResteasyViolationExceptionImpl;
import org.jboss.resteasy.test.validation.resource.ViolationExceptionConstraint;
import org.jboss.resteasy.test.validation.resource.ViolationExceptionLengthConstraint;
import org.jboss.resteasy.test.validation.resource.ViolationExceptionLengthValidator;
import org.jboss.resteasy.test.validation.resource.ViolationExceptionMinMaxValidator;
import org.jboss.resteasy.test.validation.resource.ViolationExceptionObject;
import org.jboss.resteasy.test.validation.resource.ViolationExceptionReaderWriter;
import org.jboss.resteasy.test.validation.resource.ViolationExceptionResourceWithFiveViolations;
import org.jboss.resteasy.test.validation.resource.ViolationExceptionResourceWithReturnValues;
import org.jboss.resteasy.spi.HttpResponseCodes;
import org.jboss.resteasy.utils.TestUtil;
import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import jakarta.ws.rs.client.Client;
import jakarta.ws.rs.client.ClientBuilder;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.core.Response;
import static org.jboss.resteasy.utils.PortProviderUtil.generateURL;
/**
 * Integration tests verifying that a {@code ResteasyViolationException}
 * serialized into a response entity can be reconstructed client-side with
 * the correct violation counts, messages and values.
 *
 * @tpSubChapter Validator provider
 * @tpChapter Integration tests
 * @tpSince RESTEasy 3.0.16
 */
@RunWith(Arquillian.class)
@RunAsClient
public class ResteasyViolationExceptionRepresentationTest {
    protected static final Logger logger = LogManager.getLogger(ResteasyViolationExceptionRepresentationTest.class.getName());
    static Client client;
    // Deployment names double as the URL context for generateURL().
    private static final String TEST_VIOLATIONS_BEFORE_RETURN_VALUE = "violations_test";
    private static final String TEST_RETURN_VALUES = "return_value";

    // Builds one web archive around the given resource class plus the shared
    // constraint/validator/reader-writer support classes.
    public static Archive<?> deploy(Class<?> resourceClass, String name) throws Exception {
        WebArchive war = TestUtil.prepareArchive(name);
        war.addClass(ResteasyViolationExceptionRepresentationTest.class);
        war.addClass(ViolationExceptionConstraint.class);
        war.addClass(ViolationExceptionLengthConstraint.class);
        war.addClass(ViolationExceptionLengthValidator.class);
        war.addClass(ViolationExceptionMinMaxValidator.class);
        war.addClass(ViolationExceptionObject.class);
        return TestUtil.finishContainerPrepare(war, null, resourceClass, ViolationExceptionReaderWriter.class);
    }

    @Deployment(name = TEST_RETURN_VALUES)
    public static Archive<?> testReturnValuesDeploy() throws Exception {
        return deploy(ViolationExceptionResourceWithReturnValues.class, TEST_RETURN_VALUES);
    }

    @Deployment(name = TEST_VIOLATIONS_BEFORE_RETURN_VALUE)
    public static Archive<?> testViolationsBeforeReturnValueDeploy() throws Exception {
        return deploy(ViolationExceptionResourceWithFiveViolations.class, TEST_VIOLATIONS_BEFORE_RETURN_VALUE);
    }

    // Fresh client per test; the reader-writer handles the "application/foo"
    // media type used by the resources.
    @Before
    public void init() {
        client = ClientBuilder.newClient();
        client.register(ViolationExceptionReaderWriter.class);
    }

    @After
    public void after() throws Exception {
        client.close();
    }

    /**
     * @tpTestDetails Check correct number of return value violations.
     * @tpPassCrit Violation count should be correct according to resource definition.
     * @tpSince RESTEasy 3.0.16
     */
    @Test
    @OperateOnDeployment(TEST_RETURN_VALUES)
    public void testReturnValues() throws Exception {
        // Valid native constraint
        ViolationExceptionObject foo = new ViolationExceptionObject("a");
        Response response = client.target(generateURL("/native", TEST_RETURN_VALUES)).request()
                .post(Entity.entity(foo, "application/foo"));
        Assert.assertEquals(HttpResponseCodes.SC_OK, response.getStatus());
        Assert.assertEquals("Server send wrong content", foo, response.readEntity(ViolationExceptionObject.class));
        // Valid imposed constraint
        foo = new ViolationExceptionObject("abcde");
        response = client.target(generateURL("/imposed", TEST_RETURN_VALUES)).request().post(Entity.entity(foo, "application/foo"));
        Assert.assertEquals(HttpResponseCodes.SC_OK, response.getStatus());
        response.bufferEntity();
        Assert.assertEquals("Server send wrong content", foo, response.readEntity(ViolationExceptionObject.class));
        // Valid native and imposed constraints.
        foo = new ViolationExceptionObject("abc");
        response = client.target(generateURL("/nativeAndImposed", TEST_RETURN_VALUES)).request()
                .post(Entity.entity(foo, "application/foo"));
        Assert.assertEquals(HttpResponseCodes.SC_OK, response.getStatus());
        Assert.assertEquals("Server send wrong content", foo, response.readEntity(ViolationExceptionObject.class));
        // Invalid native constraint: expect a 500 plus a validation header, and
        // the exception rebuilt from the entity must carry one return-value
        // violation.
        response = client.target(generateURL("/native", TEST_RETURN_VALUES)).request()
                .post(Entity.entity(new ViolationExceptionObject("abcdef"), "application/foo"));
        Assert.assertEquals(HttpResponseCodes.SC_INTERNAL_SERVER_ERROR, response.getStatus());
        String header = response.getStringHeaders().getFirst(Validation.VALIDATION_HEADER);
        Assert.assertNotNull("Header of response should not be null", header);
        Assert.assertTrue("Validation header is not correct", Boolean.valueOf(header));
        Object entity = response.readEntity(String.class);
        logger.info("Entity from response: " + entity);
        ResteasyViolationException e = new ResteasyViolationExceptionImpl(String.class.cast(entity));
        logger.info("Received exception: " + e.toString());
        TestUtil.countViolations(e, 1, 0, 0, 0, 1);
        ResteasyConstraintViolation cv = e.getReturnValueViolations().iterator().next();
        Assert.assertEquals("Exception has wrong message", cv.getMessage(), "s must have length: 1 <= length <= 3");
        Assert.assertEquals("Exception has wrong value", "Foo[abcdef]", cv.getValue());
        // Invalid imposed constraint
        response = client.target(generateURL("/imposed", TEST_RETURN_VALUES)).request()
                .post(Entity.entity(new ViolationExceptionObject("abcdef"), "application/foo"));
        Assert.assertEquals(HttpResponseCodes.SC_INTERNAL_SERVER_ERROR, response.getStatus());
        header = response.getStringHeaders().getFirst(Validation.VALIDATION_HEADER);
        Assert.assertNotNull("Header of response should not be null", header);
        Assert.assertTrue("Validation header is not correct", Boolean.valueOf(header));
        entity = response.readEntity(String.class);
        logger.info("Entity from response: " + entity);
        e = new ResteasyViolationExceptionImpl(String.class.cast(entity));
        TestUtil.countViolations(e, 1, 0, 0, 0, 1);
        cv = e.getReturnValueViolations().iterator().next();
        Assert.assertEquals("Exception has wrong message", cv.getMessage(), "s must have length: 3 <= length <= 5");
        Assert.assertEquals("Exception has wrong value", "Foo[abcdef]", cv.getValue());
        // Invalid native and imposed constraints: two return-value violations
        // whose iteration order is unspecified, hence the swap below.
        response = client.target(generateURL("/nativeAndImposed", TEST_RETURN_VALUES)).request()
                .post(Entity.entity(new ViolationExceptionObject("abcdef"), "application/foo"));
        Assert.assertEquals(HttpResponseCodes.SC_INTERNAL_SERVER_ERROR, response.getStatus());
        header = response.getStringHeaders().getFirst(Validation.VALIDATION_HEADER);
        Assert.assertNotNull("Header of response should not be null", header);
        Assert.assertTrue("Validation header is not correct", Boolean.valueOf(header));
        entity = response.readEntity(String.class);
        logger.info("Entity from response: " + entity);
        e = new ResteasyViolationExceptionImpl(String.class.cast(entity));
        TestUtil.countViolations(e, 2, 0, 0, 0, 2);
        Iterator<ResteasyConstraintViolation> it = e.getReturnValueViolations().iterator();
        ResteasyConstraintViolation cv1 = it.next();
        ResteasyConstraintViolation cv2 = it.next();
        if (!cv1.toString().contains("1")) {
            ResteasyConstraintViolation temp = cv1;
            cv1 = cv2;
            cv2 = temp;
        }
        Assert.assertEquals("Exception has wrong message", cv1.getMessage(), "s must have length: 1 <= length <= 3");
        Assert.assertEquals("Exception has wrong value", "Foo[abcdef]", cv1.getValue());
        Assert.assertEquals("Exception has wrong message", cv2.getMessage(), "s must have length: 3 <= length <= 5");
        Assert.assertEquals("Exception has wrong value", "Foo[abcdef]", cv2.getValue());
    }

    /**
     * @tpTestDetails Check correct number of violations before return in resource.
     * @tpPassCrit Violation count should be correct according to resource definition.
     * @tpSince RESTEasy 3.0.16
     */
    @Test
    @OperateOnDeployment(TEST_VIOLATIONS_BEFORE_RETURN_VALUE)
    public void testViolationsBeforeReturnValue() throws Exception {
        // Valid
        ViolationExceptionObject foo = new ViolationExceptionObject("pqrs");
        Response response = client.target(generateURL("/abc/wxyz/unused/unused", TEST_VIOLATIONS_BEFORE_RETURN_VALUE)).request()
                .post(Entity.entity(foo, "application/foo"));
        Assert.assertEquals(HttpResponseCodes.SC_OK, response.getStatus());
        Assert.assertEquals("Server send wrong content", foo, response.readEntity(ViolationExceptionObject.class));
        // Invalid: Should have 1 each of field, property, class, and parameter violations,
        // and no return value violations.
        foo = new ViolationExceptionObject("p");
        response = client.target(generateURL("/a/z/unused/unused", TEST_VIOLATIONS_BEFORE_RETURN_VALUE)).request()
                .post(Entity.entity(foo, "application/foo"));
        logger.info("response: " + response);
        Assert.assertEquals(HttpResponseCodes.SC_BAD_REQUEST, response.getStatus());
        Object entity = response.readEntity(String.class);
        logger.info("entity: " + entity);
        String header = response.getStringHeaders().getFirst(Validation.VALIDATION_HEADER);
        Assert.assertNotNull("Header of response should not be null", header);
        Assert.assertTrue("Validation header is not correct", Boolean.valueOf(header));
        ResteasyViolationException e = new ResteasyViolationExceptionImpl(String.class.cast(entity));
        logger.info("exception: " + e.toString());
        TestUtil.countViolations(e, 4, 2, 1, 1, 0);
        ResteasyConstraintViolation violation = TestUtil.getViolationByMessage(e.getPropertyViolations(), "size must be between 2 and 4");
        Assert.assertNotNull("Exception has wrong message", violation);
        Assert.assertEquals("Exception has wrong value", "a", violation.getValue());
        violation = TestUtil.getViolationByMessage(e.getPropertyViolations(), "size must be between 3 and 5");
        Assert.assertNotNull("Exception has wrong message", violation);
        Assert.assertEquals("Exception has wrong value", "z", violation.getValue());
        ResteasyConstraintViolation cv = e.getClassViolations().iterator().next();
        Assert.assertEquals("Exception has wrong message", "Concatenation of s and t must have length > 5", cv.getMessage());
        logger.info("value: " + cv.getValue());
        // The class-level violation's value is the resource instance itself,
        // so only its toString() prefix can be asserted.
        Assert.assertTrue("Exception has wrong value", cv.getValue().startsWith("org.jboss.resteasy.test.validation.resource.ViolationExceptionResourceWithFiveViolations@"));
        cv = e.getParameterViolations().iterator().next();
        Assert.assertEquals("Exception has wrong message", "s must have length: 3 <= length <= 5", cv.getMessage());
        Assert.assertEquals("Exception has wrong value", "Foo[p]", cv.getValue());
    }
}
|
import {Component} from '@angular/core';
import {EventEmitter} from "@angular/compiler/src/facade/async";
import {ListItem} from "../list-item";
// Inline editor row for a single list item: two-way-binds the item's name
// and amount and emits `removed` when the Delete button is pressed.
// NOTE(review): the amount field is labelled "Days" in the template while
// the property is `amount` — confirm which is intended.
// NOTE(review): `EventEmitter` is imported from a private compiler path
// ("@angular/compiler/src/facade/async") at the top of this file; it should
// normally come from '@angular/core' — verify against the Angular version.
@Component({
selector:'shopping-list-edit-item',
template:`
<div class="input">
<label for="item-name">Name</label>
<input type="text" id="item-name" [(ngModel)]="item.name">
<label for="item-amount">Days</label>
<input type="text" id="item-amount" [(ngModel)]="item.amount">
<button class="danger" (click)="onDelete()">Delete</button>
</div>
`,
inputs:['item'],
outputs:['removed'],
})
export class ShoppingListEditItem {
    // Bound input; defaults keep the template bindings safe before the
    // parent provides a value.
    item= {name:'', amount:0};
    // Emits the edited item so the parent list can remove it.
    removed= new EventEmitter<ListItem>();
    onDelete(){
        this.removed.emit(this.item);
    }
}
#!/usr/bin/env bash
# One-shot template bootstrapper: rewrites the boilerplate author/name/
# namespace placeholders in every tracked file with values passed via flags.
# Requires GNU sed (the \U case-conversion in the camel-case substitution is
# a GNU extension — this will not work with BSD/macOS sed).
while getopts a:n:u:d: flag
do
    case "${flag}" in
        a) author=${OPTARG};;
        n) name=${OPTARG};;
        u) urlname=${OPTARG};;
        d) description=${OPTARG};;
    esac
done
echo "Author: $author";
echo "Project Name: $name";
echo "Project URL name: $urlname";
echo "Description: $description";
# Turn the kebab-case URL name into CamelCase.
# NOTE(review): the variable is (mis)spelled "cameCase" throughout — renaming
# it would touch every use below, so it is only flagged here.
cameCase=$(sed -r 's/(^|-)(\w)/\U\2/g' <<<"$urlname")
namespace="namespace $cameCase"
plugin="new $cameCase.Plugin()"
settings="IPlugin$cameCase"
# Page id: the upper-case letters of the camel-case name.
pageid=$(sed 's/[a-z ]//g' <<< "$cameCase")
echo "$cameCase $namespace $settings $pageid"
echo "Renaming project..."
original_author="MatrixRequirements"
original_name="matrix-ui-plugin-boilerplate"
original_description=" matrix-ui-plugin-boilerplate created by someone"
original_setting="IPluginBoilerPlate"
original_namespace="namespace BoilerPlate"
original_plugin="new BoilerPlate.Plugin()"
original_pageid="BPP"
# Rewrite placeholders in every git-tracked file except the .github workflows.
for filename in $(git ls-files)
do
    echo "Processing $filename"
    [[ $filename = .github* ]] && echo "Skipping $filename"
    [[ $filename != .github* ]] && sed -i "s/$original_name/$name/g" "$filename"
    [[ $filename != .github* ]] && sed -i "s/$original_author/$author/g" "$filename"
    [[ $filename != .github* ]] && sed -i "s/$original_description/$description/g" "$filename"
    [[ $filename != .github* ]] && sed -i "s/$original_setting/$settings/g" "$filename"
    [[ $filename != .github* ]] && sed -i "s/$original_namespace/$namespace/g" "$filename"
    [[ $filename != .github* ]] && sed -i "s/$original_pageid/$pageid/g" "$filename"
    [[ $filename != .github* ]] && sed -i "s/$original_plugin/$plugin/g" "$filename"
    [[ $filename != .github* ]] && echo "$filename fixed"
    # Prefix underscore-named TypeScript files with the page id.
    [[ $filename = _*.ts ]] && git mv $filename $pageid.$filename
done
# This command runs only once on GHA!
|
package gapi
import (
"testing"
"github.com/gobs/pretty"
)
// Canned JSON payloads returned by the stub server in the tests below.
const (
// Response for dashboard create/update calls.
createdAndUpdateDashboardResponse = `{
"slug": "test",
"id": 1,
"uid": "nErXDvCkzz",
"status": "success",
"version": 1
}`
// Response for fetching a single dashboard (by slug or UID).
getDashboardResponse = `{
"dashboard": {
"id": 1,
"uid": "cIBgcSjkk",
"title": "Production Overview",
"version": 0
},
"meta": {
"isStarred": false,
"url": "/d/cIBgcSjkk/production-overview",
"slug": "production-overview"
}
}`
// Response for listing dashboards (search endpoint).
getDashboardsJSON = `[
{
"id": 1,
"uid": "RGAPB1cZz",
"title": "Grafana Stats",
"uri": "db/grafana-stats",
"url": "/dashboards/d/RGAPB1cZz/grafana-stat",
"slug": "",
"type": "dash-db",
"tags": [],
"isStarred": false
}
]`
)
// TestDashboardCreateAndUpdate verifies that NewDashboard parses a success
// response and surfaces an error for each failing HTTP status code.
func TestDashboardCreateAndUpdate(t *testing.T) {
	server, client := gapiTestTools(t, 200, createdAndUpdateDashboardResponse)
	defer server.Close()
	dashboard := Dashboard{
		Model: map[string]interface{}{
			"title": "test",
		},
		Folder:    0,
		Overwrite: false,
	}
	resp, err := client.NewDashboard(dashboard)
	if err != nil {
		t.Fatal(err)
	}
	t.Log(pretty.PrettyFormat(resp))
	if resp.UID != "nErXDvCkzz" {
		t.Errorf("Invalid uid - %s, Expected %s", resp.UID, "nErXDvCkzz")
	}
	// Error statuses must be reported as errors by the client.
	for _, code := range []int{400, 401, 403, 412} {
		server.code = code
		_, err = client.NewDashboard(dashboard)
		if err == nil {
			t.Errorf("%d not detected", code)
		}
	}
}
// TestDashboardGet verifies fetching a dashboard both by slug and by UID,
// plus error propagation for failing HTTP status codes.
func TestDashboardGet(t *testing.T) {
	server, client := gapiTestTools(t, 200, getDashboardResponse)
	defer server.Close()
	resp, err := client.Dashboard("test")
	if err != nil {
		t.Error(err)
	}
	uid, ok := resp.Model["uid"]
	if !ok || uid != "cIBgcSjkk" {
		t.Errorf("Invalid uid - %s, Expected %s", uid, "cIBgcSjkk")
	}
	resp, err = client.DashboardByUID("cIBgcSjkk")
	if err != nil {
		t.Fatal(err)
	}
	uid, ok = resp.Model["uid"]
	if !ok || uid != "cIBgcSjkk" {
		t.Fatalf("Invalid UID - %s, Expected %s", uid, "cIBgcSjkk")
	}
	// Error statuses must be reported for both lookup styles.
	for _, code := range []int{401, 403, 404} {
		server.code = code
		_, err = client.Dashboard("test")
		if err == nil {
			t.Errorf("%d not detected", code)
		}
		_, err = client.DashboardByUID("cIBgcSjkk")
		if err == nil {
			t.Errorf("%d not detected", code)
		}
	}
}
// TestDashboardDelete verifies deleting a dashboard by slug and by UID,
// plus error propagation for failing HTTP status codes.
func TestDashboardDelete(t *testing.T) {
	server, client := gapiTestTools(t, 200, "")
	defer server.Close()
	err := client.DeleteDashboard("test")
	if err != nil {
		t.Error(err)
	}
	err = client.DeleteDashboardByUID("cIBgcSjkk")
	if err != nil {
		t.Fatal(err)
	}
	// Error statuses must be reported for both delete styles.
	for _, code := range []int{401, 403, 404, 412} {
		server.code = code
		err = client.DeleteDashboard("test")
		if err == nil {
			t.Errorf("%d not detected", code)
		}
		err = client.DeleteDashboardByUID("cIBgcSjkk")
		if err == nil {
			t.Errorf("%d not detected", code)
		}
	}
}
// TestDashboards verifies that the dashboards listing endpoint is parsed
// into the expected single entry.
func TestDashboards(t *testing.T) {
	server, client := gapiTestTools(t, 200, getDashboardsJSON)
	defer server.Close()
	dashboards, err := client.Dashboards()
	if err != nil {
		t.Fatal(err)
	}
	t.Log(pretty.PrettyFormat(dashboards))
	if len(dashboards) != 1 {
		t.Error("Length of returned dashboards should be 1")
	}
	if dashboards[0].ID != 1 || dashboards[0].Title != "Grafana Stats" {
		t.Error("Not correctly parsing returned dashboards.")
	}
}
|
from typing import List
def max_profit(prices: List[int]) -> int:
    """Best single buy-then-sell profit achievable over `prices`.

    Scans once, tracking the lowest price seen so far and the best
    profit obtainable by selling at the current price.  Returns 0 for
    an empty list or monotonically falling prices.
    """
    best = 0
    lowest = None
    for current in prices:
        if lowest is None or current < lowest:
            lowest = current
        else:
            best = max(best, current - lowest)
    return best
number = 2
prime_list = []

def is_prime(num):
    """Return True if `num` is a prime number.

    Fixed: numbers below 2 are now rejected (the original returned True
    for 0 and 1), and trial division stops at sqrt(num) instead of
    scanning every integer below num.
    """
    if num < 2:
        return False
    for i in range(2, int(num ** 0.5) + 1):
        if num % i == 0:
            return False
    return True

# Collect all primes below 100.
while number < 100:
    if is_prime(number):
        prime_list.append(number)
    number += 1
#!/bin/bash
# Phase an hg38 VCF against the 1000 Genomes reference panel with Eagle2,
# producing a single phased BCF (${OUT_PREFIX}.phased.bcf) plus index.
if [ $# -ne 2 ]
then
    echo ""
    echo "Usage: $0 <output prefix> <input.hg38.vcf.gz>"
    echo ""
    exit -1
fi
SCRIPT=$(readlink -f "$0")
BASEDIR=$(dirname "$SCRIPT")
export PATH=/g/funcgen/bin/:${PATH}
# CMD params
THREADS=4
OP=${1}
INVCF=${2}
# Fetch input variants: strip all INFO and all FORMAT fields except GT.
bcftools annotate -O b -o ${OP}.input.bcf -x INFO,^FORMAT/GT ${INVCF}
bcftools index ${OP}.input.bcf
# Phase against 1kGP, one autosome at a time.
FILES=""
for CHR in chr1 chr2 chr3 chr4 chr5 chr6 chr7 chr8 chr9 chr10 chr11 chr12 chr13 chr14 chr15 chr16 chr17 chr18 chr19 chr20 chr21 chr22
do
    echo "Eagle2 phasing ${CHR}"
    # Only phase chromosomes that actually contain records (the grep -A 1
    # check requires at least one data line after the #CHROM header).
    if [ `bcftools view ${OP}.input.bcf ${CHR} | grep -m 1 "^#CHROM" -A 1 | wc -l` -eq 2 ]
    then
        eagle --numThreads ${THREADS} --vcfRef ${BASEDIR}/../refpanelHG38/${CHR}.bcf --vcfTarget ${OP}.input.bcf --geneticMapFile ${BASEDIR}/../refpanelHG38/genetic_map_hg38_withX.txt.gz --outPrefix ${OP}.${CHR}.eagle2 --vcfOutFormat b --chrom ${CHR} 2>&1 | gzip -c > ${OP}.${CHR}.eagle2.log.gz
        bcftools index ${OP}.${CHR}.eagle2.bcf
        FILES=${FILES}" "${OP}.${CHR}.eagle2.bcf
    fi
done
rm ${OP}.input.bcf ${OP}.input.bcf.csi
# Concatenate chromosomes into one phased BCF, then clean up per-chromosome files.
bcftools concat -O b -o ${OP}.phased.bcf ${FILES}
bcftools index ${OP}.phased.bcf
rm ${OP}.chr*.eagle2.bcf ${OP}.chr*.eagle2.bcf.csi
|
/*
* Copyright 2002-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mammb.code.jpostal.source;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
/**
 * PostalSource.
 * <p>
 * Describes where a postal-code CSV archive can be downloaded from,
 * how its contents are read, and which editors post-process town entries.
 *
 * @author naotsugu
 */
public interface PostalSource {

    /**
     * Get the download url of this source archive.
     * @return the source url
     */
    String url();

    /**
     * Create the {@code PostalSourceReader} for the given csv file.
     * @param path the path of source csv file
     * @return the {@code PostalSourceReader}
     */
    PostalSourceReader reader(Path path);

    /**
     * Register additional {@code TownEditor}s applied while reading.
     * @param editors the list of {@code TownEditor}
     */
    void with(List<TownEditor> editors);

    /**
     * Get the standard (ken_all) source.
     * @return standard source
     */
    static PostalSource standardSource() {
        return new PostalSource() {

            /** Editors applied to each town entry while reading. */
            private final List<TownEditor> townEditors = new ArrayList<>();

            @Override
            public String url() {
                return "https://www.post.japanpost.jp/zipcode/dl/kogaki/zip/ken_all.zip";
            }

            @Override
            public PostalSourceReader reader(Path path) {
                return StandardSourceLineReader.of(path, townEditors);
            }

            @Override
            public void with(List<TownEditor> editors) {
                townEditors.addAll(editors);
            }
        };
    }

    /**
     * Get the office (jigyosyo) source.
     * @return office source
     */
    static PostalSource officeSource() {
        return new PostalSource() {

            @Override
            public String url() {
                return "https://www.post.japanpost.jp/zipcode/dl/jigyosyo/zip/jigyosyo.zip";
            }

            @Override
            public PostalSourceReader reader(Path path) {
                return OfficeSourceLineReader.of(path);
            }

            @Override
            public void with(List<TownEditor> editors) {
                // editors are not supported for the office source
            }
        };
    }
}
|
def acronym(sentence):
    """Build an acronym from the first letter of each word in a sentence.

    Args:
        sentence: The input phrase, e.g. "International Business Machines".

    Returns:
        The concatenated first letters, e.g. "IBM".
    """
    # split() without an argument collapses runs of whitespace, so the empty
    # "words" that made word[0] raise IndexError in the original never occur.
    return "".join(word[0] for word in sentence.split())

# Test
sentence = "International Business Machines"
print(acronym(sentence))
#!/usr/bin/env bash
# setting the locale, some users have issues with different locales, this forces the correct one
export LC_ALL=en_US.UTF-8

# get_tmux_option <option> <default>
# Echo the tmux global option value, falling back to <default> when unset.
get_tmux_option() {
  local option=$1
  local default_value=$2
  local option_value=$(tmux show-option -gqv "$option")
  # Quote all expansions: an unquoted empty $option_value made `[ -z ]`
  # evaluate with a missing operand, and unquoted echo arguments underwent
  # word splitting for values containing spaces.
  if [ -z "$option_value" ]; then
    echo "$default_value"
  else
    echo "$option_value"
  fi
}
# get_percent: print "used/total" memory formatted per OS (e.g. "3GB/15GB").
get_percent()
{
case $(uname -s) in
Linux)
# percent=$(free -m | awk 'NR==2{printf "%.1f%%\n", $3*100/$2}')
total_mem_gb=$(free -g | awk '/^Mem/ {print $2}')
used_mem=$(free -g | awk '/^Mem/ {print $3}')
total_mem=$(free -h | awk '/^Mem/ {print $2}')
# Less than 1 GB total: report both figures in MB.
if (( $total_mem_gb == 0)); then
memory_usage=$(free -m | awk '/^Mem/ {print $3}')
total_mem_mb=$(free -m | awk '/^Mem/ {print $2}')
echo $memory_usage\M\B/$total_mem_mb\M\B
# Less than 1 GB used: usage in MB against the GB total.
elif (( $used_mem == 0 )); then
memory_usage=$(free -m | awk '/^Mem/ {print $3}')
echo $memory_usage\M\B/$total_mem_gb\G\B
else
memory_usage=$(free -g | awk '/^Mem/ {print $3}')
echo $memory_usage\G\B/$total_mem_gb\G\B
fi
;;
Darwin)
# percent=$(ps -A -o %mem | awk '{mem += $1} END {print mem}')
# Get used memory blocks with vm_stat, multiply by 4096 to get size in bytes, then convert to MiB
used_mem=$(vm_stat | grep ' active\|wired ' | sed 's/[^0-9]//g' | paste -sd ' ' - | awk '{printf "%d\n", ($1+$2) * 4096 / 1048576}')
total_mem=$(system_profiler SPHardwareDataType | grep "Memory:" | awk '{print $2 $3}')
if (( $used_mem < 1024 )); then
echo $used_mem\M\B/$total_mem
else
memory=$(($used_mem/1024))
echo $memory\G\B/$total_mem
fi
;;
FreeBSD)
# Looked at the code from neofetch
hw_pagesize="$(sysctl -n hw.pagesize)"
mem_inactive="$(($(sysctl -n vm.stats.vm.v_inactive_count) * hw_pagesize))"
mem_unused="$(($(sysctl -n vm.stats.vm.v_free_count) * hw_pagesize))"
mem_cache="$(($(sysctl -n vm.stats.vm.v_cache_count) * hw_pagesize))"
free_mem=$(((mem_inactive + mem_unused + mem_cache) / 1024 / 1024))
total_mem=$(($(sysctl -n hw.physmem) / 1024 / 1024))
used_mem=$((total_mem - free_mem))
# Bug fix: a stray debug `echo $used_mem` here printed a raw number before
# the formatted output, corrupting the tmux status line on FreeBSD.
if (( $used_mem < 1024 )); then
echo $used_mem\M\B/$total_mem
else
memory=$(($used_mem/1024))
echo $memory\G\B/$total_mem
fi
;;
CYGWIN*|MINGW32*|MSYS*|MINGW*)
# TODO - windows compatability
;;
esac
}
# main: print one "<label> <used>/<total>" line; tmux re-runs the script to refresh.
main()
{
# storing the refresh rate in the variable RATE, default is 5
RATE=$(get_tmux_option "@dracula-refresh-rate" 5)
# Label prefix shown before the memory figure (default "RAM").
ram_label=$(get_tmux_option "@dracula-ram-usage-label" "RAM")
ram_percent=$(get_percent)
echo "$ram_label $ram_percent"
# Sleep so back-to-back invocations respect the configured refresh rate.
sleep $RATE
}
#run main driver
main
|
#!/bin/sh
# ---------------------------------------------------------------------------
# Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Detect the host platform so platform-specific defaults can be applied.
cygwin=false;
darwin=false;
os400=false;
mingw=false;
case "`uname`" in
CYGWIN*) cygwin=true;;
MINGW*) mingw=true;;
OS400*) os400=true;;
Darwin*) darwin=true
# On macOS, fall back to the Apple framework JDK layout when
# JAVA_VERSION / JAVA_HOME are not provided by the environment.
if [ -z "$JAVA_VERSION" ] ; then
JAVA_VERSION="CurrentJDK"
else
echo "Using Java version: $JAVA_VERSION"
fi
if [ -z "$JAVA_HOME" ] ; then
JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/${JAVA_VERSION}/Home
fi
;;
esac
# resolve links - $0 may be a softlink
PRG="$0"
while [ -h "$PRG" ]; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '.*/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`/"$link"
fi
done
# Get standard environment variables
PRGDIR=`dirname "$PRG"`
# Only set CARBON_HOME if not already set
[ -z "$CARBON_HOME" ] && CARBON_HOME=`cd "$PRGDIR/.." ; pwd`
[ -z "$RUNTIME_HOME" ] && RUNTIME_HOME=`cd "$PRGDIR/../wso2/dashboard" ; pwd`
# Installing jars
# One-shot tool run that installs jars from bin/tools into the runtime.
java -cp "$CARBON_HOME/bin/tools/*" -Dwso2.carbon.tool="install-jars" org.wso2.carbon.tools.CarbonToolExecutor "$CARBON_HOME"
###########################################################################
NAME=start-dashboard
# Daemon name, where is the actual executable
DASHBOARD_INIT_SCRIPT="$CARBON_HOME/wso2/dashboard/bin/carbon.sh"
# If the daemon is not there, then exit.
# Delegate to the dashboard runtime's carbon.sh, forwarding all CLI args.
$DASHBOARD_INIT_SCRIPT $*
exit;
|
/*
* (C) Copyright 2002
* <NAME>, <EMAIL>
*
* SPDX-License-Identifier: GPL-2.0+
*/
/*
* Boot support
*/
#include <common.h>
#include <command.h>
#include <s_record.h>
#include <net.h>
#include <ata.h>
#include <asm/io.h>
#include <part.h>
#include <fat.h>
#include <fs.h>
#include <sys_partition.h>
/*
 * do_fat_fsload() - 'fatload' command handler.
 * Thin wrapper that delegates to the generic do_load() with the FAT
 * filesystem type selected.
 */
int do_fat_fsload (cmd_tbl_t *cmdtp, int flag, int argc, char * const argv[])
{
return do_load(cmdtp, flag, argc, argv, FS_TYPE_FAT);
}
/* Register the 'fatload' shell command (up to 7 args) with its help text. */
U_BOOT_CMD(
fatload, 7, 0, do_fat_fsload,
"load binary file from a dos filesystem",
"<interface> [<dev[:part]>] <addr> <filename> [bytes [pos]]\n"
" - Load binary file 'filename' from 'dev' on 'interface'\n"
" to address 'addr' from dos filesystem.\n"
" 'pos' gives the file position to start loading from.\n"
" If 'pos' is omitted, 0 is used. 'pos' requires 'bytes'.\n"
" 'bytes' gives the size to load. If 'bytes' is 0 or omitted,\n"
" the load stops on end of file.\n"
" If either 'pos' or 'bytes' are not aligned to\n"
" ARCH_DMA_MINALIGN then a misaligned buffer warning will\n"
" be printed and performance will suffer for the load."
);
/*
 * do_aw_fat_fsload() - 'aw_fatload' command handler (Allwinner variant).
 * Delegates to do_aw_load() with the FAT filesystem type selected.
 */
int do_aw_fat_fsload(cmd_tbl_t *cmdtp, int flag, int argc, char * const argv[])
{
return do_aw_load(cmdtp, flag, argc, argv, FS_TYPE_FAT);
}
/* Register the Allwinner 'aw_fatload' command, mirroring 'fatload'. */
U_BOOT_CMD(
aw_fatload, 7, 0, do_aw_fat_fsload,
"load binary file from a dos filesystem",
"<interface> [<dev[:part]>] <addr> <filename> [bytes [pos]]\n"
" - Load binary file 'filename' from 'dev' on 'interface'\n"
" to address 'addr' from dos filesystem.\n"
" 'pos' gives the file position to start loading from.\n"
" If 'pos' is omitted, 0 is used. 'pos' requires 'bytes'.\n"
" 'bytes' gives the size to load. If 'bytes' is 0 or omitted,\n"
" the load stops on end of file.\n"
" If either 'pos' or 'bytes' are not aligned to\n"
" ARCH_DMA_MINALIGN then a misaligned buffer warning will\n"
" be printed and performance will suffer for the load."
);
/*
 * aw_fat_fsload() - load a file from a FAT filesystem on a named sunxi
 * partition by building an argv vector for do_aw_fat_fsload().
 *
 * @part_name: name of the sunxi partition holding the filesystem
 * @file_name: file to load (truncated to fit the local 31-char buffer)
 * @load_addr: destination address in RAM
 * @length:    number of bytes to read
 *
 * Return: number of bytes read on success, -1 on failure.
 */
int aw_fat_fsload(char *part_name, char *file_name, char* load_addr, ulong length)
{
	int part_no;
	char part_num[16] = {0};
	char filename[32] = {0};
	char read_addr[32] = {0};
	char len[16] = {0};
	/* Signed: do_aw_fat_fsload() can report errors with negative values;
	 * the original `unsigned int` made the `<= 0` check miss them. */
	int read_bytes = 0;

	part_no = sunxi_partition_get_partno_byname(part_name);
	if(part_no < 0)
	{
		printf("no the part:%s\n", part_name);
		return -1;
	}

	char * temp_argv[6] = { "fatload", "sunxi_flash", part_num, "00000000", filename, len};

	sprintf(part_num, "%x:0", part_no);
	temp_argv[2] = part_num;
	sprintf(read_addr, "%lx", (ulong)load_addr);
	temp_argv[3] = read_addr;
	/* Bounded copy: the original strcpy() could overflow filename[32];
	 * the buffer is zero-initialized, so the NUL terminator is preserved. */
	strncpy(filename, file_name, sizeof(filename) - 1);
	sprintf(len, "%ld", length);
	temp_argv[5] = len;

	read_bytes = do_aw_fat_fsload(0, 0, 5, temp_argv);
	if(read_bytes <= 0)
	{
		printf("do_aw_fat_fsload: unable to open file %s\n", temp_argv[4]);
		return -1;
	}
	return read_bytes;
}
/* 'fatls' handler: list a directory via the generic do_ls() with FAT. */
static int do_fat_ls(cmd_tbl_t *cmdtp, int flag, int argc, char * const argv[])
{
return do_ls(cmdtp, flag, argc, argv, FS_TYPE_FAT);
}
/* Register the 'fatls' shell command (up to 4 args, repeatable). */
U_BOOT_CMD(
fatls, 4, 1, do_fat_ls,
"list files in a directory (default /)",
"<interface> [<dev[:part]>] [directory]\n"
" - list files from 'dev' on 'interface' in a 'directory'"
);
/*
 * do_fat_fsinfo() - 'fatinfo' command handler.
 * argv: [1]=interface, [2]=dev[:part] (optional).
 * Selects the block device/partition and prints FAT filesystem details.
 */
static int do_fat_fsinfo(cmd_tbl_t *cmdtp, int flag, int argc,
char * const argv[])
{
int dev, part;
block_dev_desc_t *dev_desc;
disk_partition_t info;
/* Need at least the interface argument. */
if (argc < 2) {
printf("usage: fatinfo <interface> [<dev[:part]>]\n");
return 0;
}
part = get_device_and_partition(argv[1], argv[2], &dev_desc, &info, 1);
if (part < 0)
return 1;
dev = dev_desc->dev;
/* Bind the FAT driver to the chosen device/partition. */
if (fat_set_blk_dev(dev_desc, &info) != 0) {
printf("\n** Unable to use %s %d:%d for fatinfo **\n",
argv[1], dev, part);
return 1;
}
return file_fat_detectfs();
}
U_BOOT_CMD(
fatinfo, 3, 1, do_fat_fsinfo,
"print information about filesystem",
"<interface> [<dev[:part]>]\n"
" - print information about filesystem from 'dev' on 'interface'"
);
#ifdef CONFIG_FAT_WRITE
/*
 * do_fat_fswrite() - 'fatwrite' command handler.
 * argv: [1]=interface [2]=dev[:part] [3]=addr(hex) [4]=filename [5]=count(hex).
 * Writes 'count' bytes starting at RAM address 'addr' into 'filename' on
 * the selected FAT partition. Returns 0 on success, 1 on failure.
 * NOTE(review): count reads argv[5], which requires argc >= 6, but the
 * guard only checks argc < 5 — confirm argv[] is NULL-terminated here.
 */
static int do_fat_fswrite(cmd_tbl_t *cmdtp, int flag,
int argc, char * const argv[])
{
long size;
unsigned long addr;
unsigned long count;
block_dev_desc_t *dev_desc = NULL;
disk_partition_t info;
int dev = 0;
int part = 1;
void *buf;
if (argc < 5)
return cmd_usage(cmdtp);
part = get_device_and_partition(argv[1], argv[2], &dev_desc, &info, 1);
if (part < 0)
return 1;
dev = dev_desc->dev;
if (fat_set_blk_dev(dev_desc, &info) != 0) {
printf("\n** Unable to use %s %d:%d for fatwrite **\n",
argv[1], dev, part);
return 1;
}
addr = simple_strtoul(argv[3], NULL, 16);
count = simple_strtoul(argv[5], NULL, 16);
/* Map the source region into an addressable buffer for the write. */
buf = map_sysmem(addr, count);
size = file_fat_write(argv[4], buf, count);
unmap_sysmem(buf);
/* file_fat_write() reports failure as -1. */
if (size == -1) {
printf("\n** Unable to write \"%s\" from %s %d:%d **\n",
argv[4], argv[1], dev, part);
return 1;
}
printf("%ld bytes written\n", size);
return 0;
}
U_BOOT_CMD(
fatwrite, 6, 0, do_fat_fswrite,
"write file into a dos filesystem",
"<interface> <dev[:part]> <addr> <filename> <bytes>\n"
" - write file 'filename' from the address 'addr' in RAM\n"
" to 'dev' on 'interface'"
);
#endif
|
#include <iostream>

// Print a countdown from 10 down to 1, one number per line.
int main()
{
    int value = 10;
    while (value > 0)
    {
        std::cout << value << std::endl;
        --value;
    }
    return 0;
}
<gh_stars>10-100
import { KeyboardNavigationAction } from './keyboard-navigable'
/**
 * Supplies the focus target for each keyboard navigation action.
 * Every getter returns the element that should receive focus for that
 * direction/position, or `undefined` when no such element exists.
 */
export interface KeyboardNavigationHandler {
/** Element to the left of the current item, if any. */
getLeftItem(action: KeyboardNavigationAction): HTMLElement | undefined
/** Element to the right of the current item, if any. */
getRightItem(action: KeyboardNavigationAction): HTMLElement | undefined
/** Element above the current item, if any. */
getUpItem(action: KeyboardNavigationAction): HTMLElement | undefined
/** Element below the current item, if any. */
getDownItem(action: KeyboardNavigationAction): HTMLElement | undefined
/** First navigable element (e.g. for a Home key press), if any. */
getFirstItem(action: KeyboardNavigationAction): HTMLElement | undefined
/** Last navigable element (e.g. for an End key press), if any. */
getLastItem(action: KeyboardNavigationAction): HTMLElement | undefined
}
|
package aero.rb.rulebook;
import org.junit.jupiter.api.Test;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
/** Unit tests for {@code Rulebook}: construction, rule matching and invocation. */
@SuppressWarnings("ConstantConditions")
public class RulebookTest {
// Shared fixtures: two rules and a ruleset whose predicate matches empty strings.
Rule<String> rule = Rule.of(String::isEmpty, System.out::println);
Rule<String> rule2 = Rule.of("abcdefg"::equals, System.out::println);
Ruleset<String> ruleset = Ruleset.of(String::isEmpty, rule, rule2);
// Construction must reject a null ruleset.
@Test
void ofNullParam() {
assertThrows(NullPointerException.class, () -> Rulebook.of(null));
}
// Construction must also reject a null element inside the varargs.
@Test
void ofNullInVarargsConsumers() {
assertThrows(NullPointerException.class, () -> Rulebook.of(ruleset, null));
}
// Valid construction with one ruleset must not throw.
@Test
void ofSingleRuleset() {
Rulebook.of(ruleset);
}
// Valid construction with repeated rulesets must not throw.
@Test
void ofMultipleRulesets() {
Rulebook.of(ruleset, ruleset);
}
// Matching against a null input is rejected.
@Test
void getMatchingRulesNullPointerTest() {
Rulebook<String> rulebook = Rulebook.of(ruleset);
assertThrows(NullPointerException.class, () -> rulebook.getMatchingRules(null));
}
// "" satisfies the ruleset predicate and `rule` (isEmpty) but not `rule2`.
@Test
void getMatchingRulesTrueTest() {
Rulebook<String> rulebook = Rulebook.of(ruleset);
List<IRule<String>> matchingRules = rulebook.getMatchingRules("");
assertEquals(1, matchingRules.size());
assertEquals(rule, matchingRules.get(0));
}
// "123" matches no rule predicate, so nothing is returned.
@Test
void getMatchingRulesFalseTest() {
Rulebook<String> rulebook = Rulebook.of(ruleset);
List<IRule<String>> matchingRules = rulebook.getMatchingRules("123");
assertEquals(0, matchingRules.size());
}
// Invoking with a null input is rejected.
@Test
void invokeMatchingNullPointerTest() {
Rulebook<String> rulebook = Rulebook.of(ruleset);
assertThrows(NullPointerException.class, () -> rulebook.invokeMatchingRules(null));
}
// Three isEmpty rules fire once each for "" and never for "123",
// leaving the latch at 4 - 3 = 1.
@Test
void invokeMatchingSingleRulesetTest() {
CountDownLatch latch = new CountDownLatch(4);
Rulebook<String> rulebook =
Rulebook.of(
Ruleset.of(str -> true,
Rule.of(String::isEmpty, str -> latch.countDown()),
Rule.of(String::isEmpty, str -> latch.countDown()),
Rule.of(String::isEmpty, str -> latch.countDown())
)
);
rulebook.invokeMatchingRules("");
rulebook.invokeMatchingRules("123");
assertEquals(1, latch.getCount());
}
// "" fires nothing; "123" fires the three non-empty rules plus the three
// "123" rules, leaving the latch at 7 - 6 = 1.
@Test
void invokeMatchingMultipleRulesetTest() {
CountDownLatch latch = new CountDownLatch(7);
Rulebook<String> rulebook =
Rulebook.of(
Ruleset.of(str -> true,
Rule.of(str -> !str.isEmpty(), str -> latch.countDown()),
Rule.of(str -> !str.isEmpty(), str -> latch.countDown()),
Rule.of(str -> !str.isEmpty(), str -> latch.countDown())
),
Ruleset.of(str -> str.length() == 3,
Rule.of("123"::equals, str -> latch.countDown()),
Rule.of("123"::equals, str -> latch.countDown()),
Rule.of("123"::equals, str -> latch.countDown())
)
);
rulebook.invokeMatchingRules("");
rulebook.invokeMatchingRules("123");
assertEquals(1, latch.getCount());
}
}
|
<filename>source/javascripts/table.js
//=require tablesift.js/dest/tablesift

// Sortable-table setup. Columns 2 and 4 sort as dates; columns 6-9 sort
// numerically, with non-numeric cells pushed to the bottom via sentinels.
TableSift.init('sortable', {
  removeChars: [',', '$'],
  customSort: {
    2: function(con, el) {
      return Date.parse(con);
    },
    4: function(con, el) {
      return Date.parse(con);
    },
    6: function(con, el) {
      if (con.match(/\d/)) {
        return Number(con.replace(/[^0-9\.]+/g, ""));
      } else {
        return 0;
      }
    },
    7: function(con, el) {
      if (con.match(/\d/)) {
        return Number(con.replace(/[^0-9\.]+/g, ""));
      } else {
        return -10;
      }
    },
    8: function(con, el) {
      if (con.match(/\d/)) {
        return Number(con.replace(/[^0-9\.]+/g, ""));
      } else {
        return -10;
      }
    },
    9: function(con, el) {
      if (con.match(/\d/)) {
        // Bug fix: without the global flag only the FIRST comma/asterisk was
        // removed, so values like "1,234,567" became NaN after Number().
        return Number(con.replace(/\,|\*+/g, ''));
      } else {
        return -50000;
      }
    }
  }
});
|
# Take a full-screen screenshot into ~/Daily/<date>/<time>.xwd, open it in
# GIMP as a new image, then delete the on-disk copy once GIMP has loaded it.
cd ~/Daily
pd=`date '+%Y-%m-%d'`
mkdir -p $pd
cd $pd
fn=`date '+%H %M %S'`.xwd
# Dump the root window (the whole screen) to the file.
xwd -root -out "$fn"
gimp --no-data --no-fonts --as-new "$fn" &
# Give GIMP time to read the file before removing it.
sleep 10
rm "$fn"
import { defineMessages, useIntl } from 'react-intl';
import { Button, Modal as BootstrapModal } from 'react-bootstrap';
import '../css/modal.css';
/** Props for the localized wrapper around react-bootstrap's modal. */
export interface IModalProps {
/** Whether the modal is currently visible. */
show: boolean;
/** Text shown in the modal header. */
title: string;
/** Label for the accept button; defaults to a localized "Accept". */
acceptTitle?: React.ReactNode;
/** Label for the close button; defaults to a localized "Close". */
closeTitle?: React.ReactNode;
/** Called when the accept button is clicked; button hidden when absent. */
onAccept?: () => void;
/** Called on close/hide; footer close button hidden when absent. */
onClose?: () => void;
/** Suppress the footer close button even when onClose is provided. */
noCloseButton?: boolean;
/** Bootstrap modal size; defaults to 'lg'. */
size?: 'sm' | 'lg';
children: React.ReactNode;
}
// Static-backdrop modal with optional, localized accept/close footer buttons.
const Modal: React.FC<IModalProps> = (props: IModalProps) => {
const { formatMessage } = useIntl();
// Default button labels, translated via react-intl.
const messages = defineMessages({
accept: {
id: 'modal.accept',
description: 'Default modal accept button',
defaultMessage: 'Accept',
},
close: {
id: 'modal.close',
description: 'Default modal close button',
defaultMessage: 'Close',
},
});
const { show, title, acceptTitle, closeTitle, onAccept, onClose, noCloseButton, children } = props;
// Guarded wrappers: both callbacks are optional.
const handleClose = () => {
if (onClose) {
onClose();
}
};
const handleAccept = () => {
if (onAccept) {
onAccept();
}
};
// Render the footer only when at least one button would appear in it.
const hasButton = onAccept != null || (onClose != null && !noCloseButton);
return (
<BootstrapModal
show = {show}
backdrop = 'static'
size = {props.size ?? 'lg'}
centered
onHide = {onClose}
keyboard = {false}>
<BootstrapModal.Header closeButton={onClose != null}>
<BootstrapModal.Title>{title}</BootstrapModal.Title>
</BootstrapModal.Header>
<BootstrapModal.Body>
{children}
</BootstrapModal.Body>
{hasButton && (
<BootstrapModal.Footer>
{onClose != null && !noCloseButton && (
<Button variant='secondary' onClick={handleClose}>
{closeTitle ?? formatMessage(messages.close)}
</Button>
)}
{onAccept != null && (
<Button variant='primary' onClick={handleAccept}>
{acceptTitle ?? formatMessage(messages.accept)}
</Button>
)}
</BootstrapModal.Footer>
)}
</BootstrapModal>
);
};
export default Modal;
|
import tensorflow as tf
import numpy as np
# Input data
# features: 3 samples x 2 columns — presumably [area, rooms]; TODO confirm.
features = np.array([[140, 3], [100, 2], [130, 2]])
# labels: matching house prices, shape (3, 1).
labels = np.array([[180000], [200000], [155000]])
# Define model
# Small regression net: one hidden ReLU layer, single linear output unit.
model = tf.keras.models.Sequential([
tf.keras.layers.Dense(64, activation='relu', input_shape=(2,)),
tf.keras.layers.Dense(1)
])
# Compile model
optimizer = tf.keras.optimizers.SGD(learning_rate=0.01)
model.compile(
optimizer=optimizer,
loss=tf.keras.losses.MeanSquaredError(),
metrics=['MeanSquaredError']
)
# Train model
# NOTE(review): inputs/targets are unscaled; SGD at lr=0.01 on values this
# large may diverge — consider normalizing and verify the training loss.
model.fit(features, labels, epochs=1000)
# Evaluate model
# predict() returns shape (1, 1); [0][0] extracts the scalar prediction.
test_features = np.array([[115, 1]])
prediction = model.predict(test_features)[0][0]
print("Predicted house price:", prediction)
# Hive environment setup (sourced by the hive launcher scripts).
export HADOOP_USER_CLASSPATH_FIRST=true #this prevents old metrics libs from mapreduce lib from bringing in old jar deps overriding HIVE_LIB
# CLI-only JVM GC tuning; the DEBUG variant omits the parallel-collector flags.
if [ "$SERVICE" = "cli" ]; then
if [ -z "$DEBUG" ]; then
export HADOOP_OPTS="$HADOOP_OPTS -XX:NewRatio=12 -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:+UseNUMA -XX:+UseParallelGC -XX:-UseGCOverheadLimit"
else
export HADOOP_OPTS="$HADOOP_OPTS -XX:NewRatio=12 -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:-UseGCOverheadLimit"
fi
fi
# The heap size of the jvm stared by hive shell script can be controlled via:
# Metastore gets a larger heap than HiveServer2/clients.
if [ "$SERVICE" = "metastore" ]; then
export HADOOP_HEAPSIZE=4007 # Setting for HiveMetastore
else
export HADOOP_HEAPSIZE=1024 # Setting for HiveServer2 and Client
fi
export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Xmx${HADOOP_HEAPSIZE}m"
# Larger heap size may be required when running queries over large number of files or partitions.
# By default hive shell scripts use a heap size of 256 (MB). Larger heap size would also be
# appropriate for hive server (hwi etc).
# Set HADOOP_HOME to point to a specific hadoop install directory
HADOOP_HOME=${HADOOP_HOME:-/usr/hdp/current/hadoop-client}
export HIVE_HOME=${HIVE_HOME:-/usr/hdp/current/hive-client}
# Hive Configuration Directory can be controlled by:
export HIVE_CONF_DIR=${HIVE_CONF_DIR:-/usr/hdp/current/hive-client/conf}
# Folder containing extra libraries required for hive compilation/execution can be controlled by:
# Prefer an explicitly provided aux-jars path; otherwise fall back to the
# HCatalog core jar when the webhcat share directory exists.
if [ "${HIVE_AUX_JARS_PATH}" != "" ]; then
if [ -f "${HIVE_AUX_JARS_PATH}" ]; then
export HIVE_AUX_JARS_PATH=${HIVE_AUX_JARS_PATH}
elif [ -d "/usr/hdp/current/hive-webhcat/share/hcatalog" ]; then
export HIVE_AUX_JARS_PATH=/usr/hdp/current/hive-webhcat/share/hcatalog/hive-hcatalog-core.jar
fi
elif [ -d "/usr/hdp/current/hive-webhcat/share/hcatalog" ]; then
export HIVE_AUX_JARS_PATH=/usr/hdp/current/hive-webhcat/share/hcatalog/hive-hcatalog-core.jar
fi
export METASTORE_PORT=9083
|
<reponame>isALEXme/cryptus
// tailwind.config.js
// Tailwind (v2-style) configuration: custom inset/background-size scales and
// a fully replaced color palette (defined outside `extend`, so defaults are
// replaced rather than augmented).
const colors = require("tailwindcss/colors");
module.exports = {
// Tree-shake unused styles from page and component sources.
purge: ["./pages/**/*.{js,ts,jsx,tsx}", "./components/**/*.{js,ts,jsx,tsx}"],
darkMode: false, // or 'media' or 'class'
theme: {
inset: {
"1/5": "17%",
"2/5": "40%",
},
backgroundSize: {
auto: "auto",
cover: "cover",
contain: "contain",
"50%": "50%",
0: "50%", // NOTE(review): bg-0 resolves to "50%" — looks like a typo; confirm intended value
16: "4rem",
},
extend: {},
colors: {
transparent: "transparent",
current: "currentColor",
black: colors.black,
white: colors.white,
gray: colors.trueGray,
indigo: colors.indigo,
red: colors.rose,
yellow: colors.amber,
// Project brand colors below.
azul: {
darkest: "#1f2d3d",
dark: "#56CAD8",
DEFAULT: "#6BD5E1",
light: "#e0e6ed",
lightest: "#f9fafc",
},
honey: {
DEFAULT: "#ffbaba",
},
tomato: {
DEFAULT: "#ED302C",
},
leaf: {
DEFAULT: "#8BD169",
},
olive: {
DEFAULT: "#F4F4DB",
},
dirt: {
DEFAULT: "#888375",
},
coquille: {
DEFAULT: "#FFFDF5",
},
},
},
variants: {
extend: {},
},
plugins: [],
};
|
from typing import List
def strip_image_registry(image: str) -> str:
    """Drop the leading registry component from a whitelisted image name.

    Calls fatal_error (and returns None) when the registry is not whitelisted.
    """
    registry, _, remainder = image.partition('/')
    if registry in WHITELISTED_DOCKER_REGISTRIES:
        return remainder
    fatal_error("Invalid image to strip: %s Registry not in whitelist: %s", image, WHITELISTED_DOCKER_REGISTRIES)
def mirror_images(images: List[str]):
    """Re-tag each image for every configured mirror registry and push it."""
    for image in images:
        stripped = strip_image_registry(image)
        for mirror in DOCKER_REGISTRY_MIRRORS:
            target = '/'.join((mirror, stripped))
            try:
                run_cmd(['docker', 'tag', image, target])
                run_cmd(['docker', 'push', target])
            except Exception as e:
                # Best-effort: report the failure and continue with the rest.
                print(f"Error mirroring image {image} to {mirror}: {e}")
<gh_stars>0
/*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.common.nosql.mongo.dao.model;
import static org.kaaproject.kaa.server.common.nosql.mongo.dao.model.MongoModelConstants.USER_CONFIGURATION;
import static org.kaaproject.kaa.server.common.nosql.mongo.dao.model.MongoModelConstants.USER_CONF_APP_TOKEN;
import static org.kaaproject.kaa.server.common.nosql.mongo.dao.model.MongoModelConstants.USER_CONF_BODY;
import static org.kaaproject.kaa.server.common.nosql.mongo.dao.model.MongoModelConstants.USER_CONF_SCHEMA_VERSION;
import static org.kaaproject.kaa.server.common.nosql.mongo.dao.model.MongoModelConstants.USER_CONF_USER_ID;
import java.io.Serializable;
import java.util.Objects;
import org.kaaproject.kaa.common.dto.EndpointUserConfigurationDto;
import org.kaaproject.kaa.server.common.dao.model.EndpointUserConfiguration;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.index.Indexed;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.Field;
/**
 * MongoDB document holding a single endpoint user's configuration for one
 * application and configuration schema version.
 * The document id is the composite {@code userId|appToken|schemaVersion}.
 */
@Document(collection = USER_CONFIGURATION)
public class MongoEndpointUserConfiguration implements EndpointUserConfiguration, Serializable {

    private static final long serialVersionUID = 7678593961823855167L;

    /** Separator between the composite id's key components. */
    private static final String ID_DELIMITER = "|";

    @Id
    private String id;
    @Indexed
    @Field(USER_CONF_USER_ID)
    private String userId;
    @Indexed
    @Field(USER_CONF_APP_TOKEN)
    private String appToken;
    @Indexed
    @Field(USER_CONF_SCHEMA_VERSION)
    private Integer schemaVersion;
    @Field(USER_CONF_BODY)
    private String body;

    /** Required by Spring Data for document hydration. */
    public MongoEndpointUserConfiguration() {
    }

    /**
     * Build the document from its DTO, deriving the composite id.
     * @param dto source data transfer object
     */
    public MongoEndpointUserConfiguration(EndpointUserConfigurationDto dto) {
        this.userId = dto.getUserId();
        this.appToken = dto.getAppToken();
        this.schemaVersion = dto.getSchemaVersion();
        this.body = dto.getBody();
        this.id = userId + ID_DELIMITER + appToken + ID_DELIMITER + schemaVersion;
    }

    public String getUserId() {
        return userId;
    }

    public void setUserId(String userId) {
        this.userId = userId;
    }

    public String getAppToken() {
        return appToken;
    }

    public void setAppToken(String appToken) {
        this.appToken = appToken;
    }

    public Integer getSchemaVersion() {
        return schemaVersion;
    }

    public void setSchemaVersion(Integer schemaVersion) {
        this.schemaVersion = schemaVersion;
    }

    public String getBody() {
        return body;
    }

    public void setBody(String body) {
        this.body = body;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        MongoEndpointUserConfiguration that = (MongoEndpointUserConfiguration) o;
        // Objects.equals replaces the original hand-rolled null-safe chain.
        return Objects.equals(id, that.id)
                && Objects.equals(userId, that.userId)
                && Objects.equals(appToken, that.appToken)
                && Objects.equals(schemaVersion, that.schemaVersion)
                && Objects.equals(body, that.body);
    }

    @Override
    public int hashCode() {
        // Consistent with equals; the numeric value is not persisted here.
        return Objects.hash(id, userId, appToken, schemaVersion, body);
    }

    @Override
    public String toString() {
        return "MongoEndpointUserConfiguration{" +
                "id='" + id + '\'' +
                ", userId='" + userId + '\'' +
                ", appToken='" + appToken + '\'' +
                ", schemaVersion=" + schemaVersion +
                ", body='" + body + '\'' +
                '}';
    }

    /**
     * Convert back to the DTO form (the composite id is not part of the DTO).
     * @return populated {@code EndpointUserConfigurationDto}
     */
    @Override
    public EndpointUserConfigurationDto toDto() {
        EndpointUserConfigurationDto dto = new EndpointUserConfigurationDto();
        dto.setAppToken(appToken);
        dto.setBody(body);
        dto.setSchemaVersion(schemaVersion);
        dto.setUserId(userId);
        return dto;
    }
}
|
# Install Ansible, ensure a local [precise64] inventory entry exists, then
# provision this machine with the Vagrant-shared playbook.
echo "Installing/Updating Ansible..."
apt-get update -qq && apt-get install ansible -y -qq
# Count existing [precise64] inventory sections (brackets escaped for grep).
COUNT_HOSTS=`grep \\\[precise64\\\] /etc/ansible/hosts -c`
if [ $COUNT_HOSTS -lt 1 ]; then
# Register this machine as a local-connection host.
printf "\n[precise64] \n127.0.0.1 ansible_connection=local\n" >> /etc/ansible/hosts
fi
echo "Running Ansible playbook..."
ansible-playbook /vagrant/vagrant/playbook.yml
<reponame>LEMIBANDDEXARI/bttv<gh_stars>0
package kotlin.jvm.internal;
/**
 * Empty marker class in the kotlin.jvm.internal namespace.
 * NOTE(review): appears to mirror the marker type the Kotlin compiler passes
 * to synthetic constructors generated for default arguments — confirm against
 * the Kotlin stdlib before relying on this.
 */
public final class DefaultConstructorMarker {
// Explicit no-arg constructor; the class carries no state or behavior.
public DefaultConstructorMarker() {
}
}
|
import java.io.*;
import javax.servlet.*;
import javax.servlet.http.*;
public class HelloWorld extends HttpServlet {
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
response.setContentType("text/html");
PrintWriter out = response.getWriter();
out.println("<html><body>");
out.println("<h1>Hello, " + request.getParameter("name") + "!</h1>");
out.println("</body></html>");
}
} |
def sort(arr):
    """Bubble-sort arr in place (ascending) and return it.

    Args:
        arr: list of mutually comparable items; mutated in place.

    Returns:
        The same list object, sorted ascending.
    """
    arr_len = len(arr)
    for i in range(arr_len):
        swapped = False
        # After pass i, the last i elements are already in final position.
        for j in range(arr_len - i - 1):
            if arr[j] > arr[j + 1]:
                arr[j], arr[j + 1] = arr[j + 1], arr[j]
                swapped = True
        # Early exit: a full pass with no swaps means the list is sorted.
        if not swapped:
            break
    return arr

print(sort([1, 5, 4, 2, 3]))  # Outputs [1, 2, 3, 4, 5]
fn main() {
    let sentence = "The big brown fox jumped over the lazy old dog";
    let word = "jumped"; // the word to find
    let to_upper = |s: &str| s.to_uppercase(); // transformation to apply

    // Report every whitespace-separated token equal to the target word,
    // printing its upper-cased form.
    sentence
        .split_whitespace()
        .filter(|&w| w == word)
        .for_each(|w| {
            let result = to_upper(w);
            println!("{} replaced with {}", word, result);
        });
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.