content stringlengths 1 103k ⌀ | path stringlengths 8 216 | filename stringlengths 2 179 | language stringclasses 15
values | size_bytes int64 2 189k | quality_score float64 0.5 0.95 | complexity float64 0 1 | documentation_ratio float64 0 1 | repository stringclasses 5
values | stars int64 0 1k | created_date stringdate 2023-07-10 19:21:08 2025-07-09 19:11:45 | license stringclasses 4
values | is_test bool 2
classes | file_hash stringlengths 32 32 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
// Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n// SPDX-License-Identifier: Apache-2.0\n// SPDX-License-Identifier: MIT\n\nimport Foundation\n\n// declare our empty protocol, and conformance, for typing\npublic protocol JSValue {}\nextension String: JSValue {}\nextension Bool: JSValue {}\nextension Int: JSValue {}\nextension Float: JSValue {}\nextension Double: JSValue {}\nextension NSNumber: JSValue {}\nextension NSNull: JSValue {}\nextension Array: JSValue {}\nextension Date: JSValue {}\nextension Dictionary: JSValue where Key == String, Value == JSValue {}\n\n// convenience aliases\npublic typealias JSObject = [String: JSValue]\npublic typealias JSArray = [JSValue]\n\nextension Dictionary where Key == String, Value == JSValue {\n public func getValue(_ key: String) -> JSValue? {\n return self[key]\n }\n\n public func getString(_ key: String) -> String? {\n return self[key] as? String\n }\n\n public func getBool(_ key: String) -> Bool? {\n return self[key] as? Bool\n }\n\n public func getInt(_ key: String) -> Int? {\n return self[key] as? Int\n }\n\n public func getFloat(_ key: String) -> Float? {\n if let floatValue = self[key] as? Float {\n return floatValue\n } else if let doubleValue = self[key] as? Double {\n return Float(doubleValue)\n }\n return nil\n }\n\n public func getDouble(_ key: String) -> Double? {\n return self[key] as? Double\n }\n\n public func getArray(_ key: String) -> JSArray? {\n return self[key] as? JSArray\n }\n\n public func getObject(_ key: String) -> JSObject? {\n return self[key] as? JSObject\n }\n}\n\n/*\n Simply casting objects from foundation class clusters (such as __NSArrayM)\n doesn't work with the JSValue protocol and will always fail. So we need to\n recursively and explicitly convert each value in the dictionary.\n */\npublic enum JSTypes {}\nextension JSTypes {\n public static func coerceDictionaryToJSObject(\n _ dictionary: NSDictionary?, formattingDatesAsStrings: Bool = false\n ) -> JSObject? 
{\n return coerceToJSValue(dictionary, formattingDates: formattingDatesAsStrings) as? JSObject\n }\n\n public static func coerceDictionaryToJSObject(\n _ dictionary: [AnyHashable: Any]?, formattingDatesAsStrings: Bool = false\n ) -> JSObject? {\n return coerceToJSValue(dictionary, formattingDates: formattingDatesAsStrings) as? JSObject\n }\n}\n\nprivate let dateStringFormatter = ISO8601DateFormatter()\n\n// We need a large switch statement because we have a lot of types.\n// swiftlint:disable:next cyclomatic_complexity\nprivate func coerceToJSValue(_ value: Any?, formattingDates: Bool) -> JSValue? {\n guard let value = value else {\n return nil\n }\n switch value {\n case let stringValue as String:\n return stringValue\n case let numberValue as NSNumber:\n return numberValue\n case let boolValue as Bool:\n return boolValue\n case let intValue as Int:\n return intValue\n case let floatValue as Float:\n return floatValue\n case let doubleValue as Double:\n return doubleValue\n case let dateValue as Date:\n if formattingDates {\n return dateStringFormatter.string(from: dateValue)\n }\n return dateValue\n case let nullValue as NSNull:\n return nullValue\n case let arrayValue as NSArray:\n return arrayValue.compactMap { coerceToJSValue($0, formattingDates: formattingDates) }\n case let dictionaryValue as NSDictionary:\n let keys = dictionaryValue.allKeys.compactMap { $0 as? String }\n var result: JSObject = [:]\n for key in keys {\n result[key] = coerceToJSValue(dictionaryValue[key], formattingDates: formattingDates)\n }\n return result\n default:\n return nil\n }\n}\n | dataset_sample\swift\tauri-apps_tauri\crates\tauri\mobile\ios-api\Sources\Tauri\JSTypes.swift | JSTypes.swift | Swift | 3,614 | 0.95 | 0.065041 | 0.083333 | react-lib | 654 | 2023-08-21T11:38:58.045543 | MIT | false | a507207bb030a90c7501cb17b8711729 |
// Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n// SPDX-License-Identifier: Apache-2.0\n// SPDX-License-Identifier: MIT\n\nimport os.log\nimport UIKit\n\n/// Wrapper class for os_log function\npublic class Logger {\n private static var _enabled = false\n public static var enabled: Bool {\n get {\n #if DEBUG\n return true\n #else\n return _enabled\n #endif\n }\n set {\n Logger._enabled = newValue\n }\n }\n\n static func log(_ items: Any..., category: String, type: OSLogType) {\n if Logger.enabled {\n var message = ""\n let last = items.count - 1\n for (index, item) in items.enumerated() {\n message += "\(item)"\n if index != last {\n message += " "\n }\n }\n let log = OSLog(subsystem: Bundle.main.bundleIdentifier ?? "-", category: category)\n os_log("%{public}@", log: log, type: type, String(message.prefix(4068)))\n }\n }\n\n public static func debug(_ items: Any..., category: String = "app") {\n #if DEBUG\n Logger.log(items, category: category, type: OSLogType.default)\n #else\n Logger.log(items, category: category, type: OSLogType.debug)\n #endif\n }\n\n public static func info(_ items: Any..., category: String = "app") {\n #if DEBUG\n Logger.log(items, category: category, type: OSLogType.default)\n #else\n Logger.log(items, category: category, type: OSLogType.info)\n #endif\n }\n\n public static func error(_ items: Any..., category: String = "app") {\n Logger.log(items, category: category, type: OSLogType.error)\n }\n}\n | dataset_sample\swift\tauri-apps_tauri\crates\tauri\mobile\ios-api\Sources\Tauri\Logger.swift | Logger.swift | Swift | 1,563 | 0.95 | 0.172414 | 0.25 | node-utils | 111 | 2024-12-11T00:14:26.820853 | BSD-3-Clause | false | f637b92f3047fbf591f08ea248be1db0 |
// Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n// SPDX-License-Identifier: Apache-2.0\n// SPDX-License-Identifier: MIT\n\nimport Foundation\nimport SwiftRs\nimport UIKit\nimport WebKit\nimport os.log\n\nclass PluginHandle {\n var instance: Plugin\n var loaded = false\n\n init(plugin: Plugin) {\n instance = plugin\n }\n}\n\npublic class PluginManager {\n static let shared: PluginManager = PluginManager()\n public var viewController: UIViewController?\n var plugins: [String: PluginHandle] = [:]\n var ipcDispatchQueue = DispatchQueue(label: "ipc")\n public var isSimEnvironment: Bool {\n #if targetEnvironment(simulator)\n return true\n #else\n return false\n #endif\n }\n\n public func assetUrl(fromLocalURL url: URL?) -> URL? {\n guard let inputURL = url else {\n return nil\n }\n\n return URL(string: "asset://localhost")!.appendingPathComponent(inputURL.path)\n }\n\n func onWebviewCreated(_ webview: WKWebView) {\n for (_, handle) in plugins {\n if !handle.loaded {\n handle.instance.load(webview: webview)\n }\n }\n }\n\n func load<P: Plugin>(name: String, plugin: P, config: String, webview: WKWebView?) {\n plugin.setConfig(config)\n let handle = PluginHandle(plugin: plugin)\n if let webview = webview {\n handle.instance.load(webview: webview)\n handle.loaded = true\n }\n plugins[name] = handle\n }\n\n func invoke(name: String, invoke: Invoke) {\n if let plugin = plugins[name] {\n ipcDispatchQueue.async {\n let selectorWithThrows = Selector(("\(invoke.command):error:"))\n if plugin.instance.responds(to: selectorWithThrows) {\n var error: NSError? = nil\n withUnsafeMutablePointer(to: &error) {\n let methodIMP: IMP! 
= plugin.instance.method(for: selectorWithThrows)\n unsafeBitCast(\n methodIMP, to: (@convention(c) (Any?, Selector, Invoke, OpaquePointer) -> Void).self)(\n plugin.instance, selectorWithThrows, invoke, OpaquePointer($0))\n }\n if let error = error {\n invoke.reject("\(error)")\n // TODO: app crashes without this leak\n let _ = Unmanaged.passRetained(error)\n }\n } else {\n let selector = Selector(("\(invoke.command):"))\n if plugin.instance.responds(to: selector) {\n plugin.instance.perform(selector, with: invoke)\n } else {\n invoke.reject("No command \(invoke.command) found for plugin \(name)")\n }\n }\n }\n } else {\n invoke.reject("Plugin \(name) not initialized")\n }\n }\n}\n\nextension PluginManager: NSCopying {\n public func copy(with zone: NSZone? = nil) -> Any {\n return self\n }\n}\n\n@_cdecl("register_plugin")\nfunc registerPlugin(name: SRString, plugin: NSObject, config: SRString, webview: WKWebView?) {\n PluginManager.shared.load(\n name: name.toString(),\n plugin: plugin as! Plugin,\n config: config.toString(),\n webview: webview\n )\n}\n\n@_cdecl("on_webview_created")\nfunc onWebviewCreated(webview: WKWebView, viewController: UIViewController) {\n PluginManager.shared.viewController = viewController\n PluginManager.shared.onWebviewCreated(webview)\n}\n\n@_cdecl("run_plugin_command")\nfunc runCommand(\n id: Int,\n name: SRString,\n command: SRString,\n data: SRString,\n callback: @escaping @convention(c) (Int, Bool, UnsafePointer<CChar>) -> Void,\n sendChannelData: @escaping @convention(c) (UInt64, UnsafePointer<CChar>) -> Void\n) {\n let callbackId: UInt64 = 0\n let errorId: UInt64 = 1\n let invoke = Invoke(\n command: command.toString(), callback: callbackId, error: errorId,\n sendResponse: { (fn: UInt64, payload: String?) -> Void in\n let success = fn == callbackId\n callback(id, success, payload ?? 
"null")\n },\n sendChannelData: { (id: UInt64, payload: String) -> Void in\n sendChannelData(id, payload)\n }, data: data.toString())\n PluginManager.shared.invoke(name: name.toString(), invoke: invoke)\n}\n | dataset_sample\swift\tauri-apps_tauri\crates\tauri\mobile\ios-api\Sources\Tauri\Tauri.swift | Tauri.swift | Swift | 3,982 | 0.95 | 0.089552 | 0.057851 | python-kit | 252 | 2024-04-07T22:20:15.741448 | MIT | false | cc076177a6b974ca731edb3f72cc6564 |
// Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n// SPDX-License-Identifier: Apache-2.0\n// SPDX-License-Identifier: MIT\n\nimport UIKit\n\npublic class UIUtils {\n public static func centerPopover(rootViewController: UIViewController?, popoverController: UIViewController) {\n if let viewController = rootViewController {\n popoverController.popoverPresentationController?.sourceRect = CGRect(x: viewController.view.center.x, y: viewController.view.center.y, width: 0, height: 0)\n popoverController.popoverPresentationController?.sourceView = viewController.view\n popoverController.popoverPresentationController?.permittedArrowDirections = UIPopoverArrowDirection.up\n }\n }\n}\n | dataset_sample\swift\tauri-apps_tauri\crates\tauri\mobile\ios-api\Sources\Tauri\UiUtils.swift | UiUtils.swift | Swift | 742 | 0.95 | 0.133333 | 0.230769 | python-kit | 533 | 2023-10-24T21:09:22.461575 | GPL-3.0 | false | 530eca83be47789d1c8dd701cdf44a8c |
// Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n// SPDX-License-Identifier: Apache-2.0\n// SPDX-License-Identifier: MIT\n\nimport WebKit\nimport os.log\n\nstruct RegisterListenerArgs: Decodable {\n let event: String\n let handler: Channel\n}\n\nstruct RemoveListenerArgs: Decodable {\n let event: String\n let channelId: UInt64\n}\n\nopen class Plugin: NSObject {\n public let manager: PluginManager = PluginManager.shared\n var config: String = "{}"\n private var listeners = [String: [Channel]]()\n\n internal func setConfig(_ config: String) {\n self.config = config\n }\n\n public func parseConfig<T: Decodable>(_ type: T.Type) throws -> T {\n let jsonData = self.config.data(using: .utf8)!\n let decoder = JSONDecoder()\n return try decoder.decode(type, from: jsonData)\n }\n\n @objc open func load(webview: WKWebView) {}\n\n @objc open func checkPermissions(_ invoke: Invoke) {\n invoke.resolve()\n }\n\n @objc open func requestPermissions(_ invoke: Invoke) {\n invoke.resolve()\n }\n\n public func trigger(_ event: String, data: JSObject) {\n if let eventListeners = listeners[event] {\n for channel in eventListeners {\n channel.send(data)\n }\n }\n }\n\n public func trigger<T: Encodable>(_ event: String, data: T) throws {\n if let eventListeners = listeners[event] {\n for channel in eventListeners {\n try channel.send(data)\n }\n }\n }\n\n @objc func registerListener(_ invoke: Invoke) throws {\n let args = try invoke.parseArgs(RegisterListenerArgs.self)\n\n if var eventListeners = listeners[args.event] {\n eventListeners.append(args.handler)\n } else {\n listeners[args.event] = [args.handler]\n }\n\n invoke.resolve()\n }\n\n @objc func removeListener(_ invoke: Invoke) throws {\n let args = try invoke.parseArgs(RemoveListenerArgs.self)\n\n if let eventListeners = listeners[args.event] {\n\n listeners[args.event] = eventListeners.filter { $0.id != args.channelId }\n }\n\n invoke.resolve()\n }\n}\n | 
dataset_sample\swift\tauri-apps_tauri\crates\tauri\mobile\ios-api\Sources\Tauri\Plugin\Plugin.swift | Plugin.swift | Swift | 1,991 | 0.95 | 0.135802 | 0.047619 | react-lib | 247 | 2024-05-10T12:48:54.703182 | Apache-2.0 | false | ebea977cc622d6145c1306a33543911f |
// swift-tools-version:5.3\n// The swift-tools-version declares the minimum version of Swift required to build this package.\n\nimport PackageDescription\n\nlet package = Package(\n name: "tauri-plugin-{{ plugin_name }}",\n platforms: [\n .macOS(.v10_13),\n .iOS(.v13),\n ],\n products: [\n // Products define the executables and libraries a package produces, and make them visible to other packages.\n .library(\n name: "tauri-plugin-{{ plugin_name }}",\n type: .static,\n targets: ["tauri-plugin-{{ plugin_name }}"]),\n ],\n dependencies: [\n .package(name: "Tauri", path: "../.tauri/tauri-api")\n ],\n targets: [\n // Targets are the basic building blocks of a package. A target can define a module or a test suite.\n // Targets can depend on other targets in this package, and on products in packages this package depends on.\n .target(\n name: "tauri-plugin-{{ plugin_name }}",\n dependencies: [\n .byName(name: "Tauri")\n ],\n path: "Sources")\n ]\n)\n | dataset_sample\swift\tauri-apps_tauri\crates\tauri-cli\templates\plugin\ios-spm\Package.swift | Package.swift | Swift | 1,101 | 0.95 | 0 | 0.166667 | awesome-app | 804 | 2024-08-13T03:56:10.766789 | GPL-3.0 | false | 89d64ce46b1b2bd607f5fe8f77147b31 |
// swift-tools-version:5.3\n// Copyright 2019-2024 Tauri Programme within The Commons Conservancy\n// SPDX-License-Identifier: Apache-2.0\n// SPDX-License-Identifier: MIT\n\nimport PackageDescription\n\nlet package = Package(\n name: "tauri-plugin-sample",\n platforms: [\n .macOS(.v10_13),\n .iOS(.v13),\n ],\n products: [\n // Products define the executables and libraries a package produces, and make them visible to other packages.\n .library(\n name: "tauri-plugin-sample",\n type: .static,\n targets: ["tauri-plugin-sample"]),\n ],\n dependencies: [\n // Dependencies declare other packages that this package depends on.\n .package(name: "Tauri", path: "../../../../../crates/tauri/mobile/ios-api")\n ],\n targets: [\n // Targets are the basic building blocks of a package. A target can define a module or a test suite.\n // Targets can depend on other targets in this package, and on products in packages this package depends on.\n .target(\n name: "tauri-plugin-sample",\n dependencies: [\n .byName(name: "Tauri")\n ],\n path: "Sources")\n ]\n)\n | dataset_sample\swift\tauri-apps_tauri\examples\api\src-tauri\tauri-plugin-sample\ios\Package.swift | Package.swift | Swift | 1,201 | 0.95 | 0 | 0.242424 | node-utils | 448 | 2024-04-24T14:25:45.537211 | Apache-2.0 | false | a6734d4f390f348698ba35e9e2aec5bd |
// swift-tools-version:6.0\n// The swift-tools-version declares the minimum version of Swift required to build this package.\n\nimport PackageDescription\n\nlet package = Package(\n name: "server",\n platforms: [\n .macOS("10.15")\n ],\n dependencies: [\n // Dependencies declare other packages that this package depends on.\n .package(url: "https://github.com/swhitty/FlyingFox.git", .upToNextMajor(from: "0.18.0"))\n ],\n targets: [\n // Targets are the basic building blocks of a package. A target can define a module or a test suite.\n // Targets can depend on other targets in this package, and on products in packages which this package depends on.\n .executableTarget(\n name: "server",\n dependencies: [\n .product(name: "FlyingFox", package: "FlyingFox")\n ],\n swiftSettings: [\n // Enable better optimizations when building in Release configuration. Despite the use of\n // the `.unsafeFlags` construct required by SwiftPM, this flag is recommended for Release\n // builds. See <https://github.com/swift-server/guides#building-for-production> for details.\n .unsafeFlags(["-cross-module-optimization"], .when(configuration: .release))\n ])\n ]\n)\n | dataset_sample\swift\the-benchmarker_web-frameworks\flying-fox\Package.swift | Package.swift | Swift | 1,322 | 0.95 | 0.1 | 0.285714 | node-utils | 249 | 2025-05-09T05:37:36.471589 | BSD-3-Clause | false | 69fd5dbea08de5a9e5a4a0713a9e7c98 |
import FlyingFox\n\n@main\nstruct Server {\n static func main() async throws {\n let server = HTTPServer(port: 3000)\n\n await server.appendRoute("GET /") { _ in\n return HTTPResponse(statusCode: .ok)\n }\n\n await server.appendRoute("GET /user/:id") { request in\n let user = request.routeParameters["id"] ?? ""\n return HTTPResponse(statusCode: .ok, body: user.data(using: .utf8)!)\n }\n\n await server.appendRoute("POST /user") { _ in\n return HTTPResponse(statusCode: .ok)\n }\n\n try await server.start()\n }\n}\n | dataset_sample\swift\the-benchmarker_web-frameworks\flying-fox\Sources\server\Server.swift | Server.swift | Swift | 601 | 0.85 | 0.043478 | 0 | node-utils | 476 | 2025-05-05T12:58:16.813577 | Apache-2.0 | false | 25d7b7310af8476ad73b26587f8c4b71 |
// swift-tools-version:5.10\n// The swift-tools-version declares the minimum version of Swift required to build this package.\n\nimport PackageDescription\n\nlet package = Package(\n name: "server",\n platforms: [.macOS(.v14)], // This is for development on macOS\n dependencies: [\n .package(url: "https://github.com/hummingbird-project/hummingbird.git", .upToNextMinor(from: "2.0.0")),\n ],\n targets: [\n .executableTarget(\n name: "server",\n dependencies: [\n .product(name: "Hummingbird", package: "hummingbird"),\n ],\n swiftSettings: [\n // Enable better optimizations when building in Release configuration. Despite the use of\n // the `.unsafeFlags` construct required by SwiftPM, this flag is recommended for Release\n // builds. See <https://github.com/swift-server/guides#building-for-production> for details.\n .unsafeFlags(["-cross-module-optimization"], .when(configuration: .release))\n ]\n ),\n ]\n)\n | dataset_sample\swift\the-benchmarker_web-frameworks\hummingbird-framework\Package.swift | Package.swift | Swift | 1,062 | 0.95 | 0.153846 | 0.208333 | react-lib | 155 | 2023-10-29T02:19:30.942723 | MIT | false | dd9c37e851b5a0b89c86c440448fed38 |
import Hummingbird\n\n@main\nstruct Server {\n static func main() async throws {\n let env = Environment()\n let serverHostName = env.get("SERVER_HOSTNAME") ?? "127.0.0.1"\n let serverPort = env.get("SERVER_PORT", as: Int.self) ?? 8080\n\n let router = Router()\n router.get("/") { _, _ in\n HTTPResponse.Status.ok\n }\n\n router.get("user/:id") { _, ctx -> String in\n ctx.parameters.get("id") ?? ""\n }\n\n router.post("user") { _, _ in\n HTTPResponse.Status.ok\n }\n\n let app = Application(\n router: router,\n configuration: .init(address: .hostname(serverHostName, port: serverPort))\n )\n\n try await app.run()\n }\n}\n | dataset_sample\swift\the-benchmarker_web-frameworks\hummingbird-framework\Sources\server\Server.swift | Server.swift | Swift | 748 | 0.85 | 0.033333 | 0 | awesome-app | 516 | 2024-03-10T02:51:00.829037 | Apache-2.0 | false | d5e7c6a358c580a2c842b9c601b5770e |
// swift-tools-version:5.1\n\nimport PackageDescription\n\nlet package = Package(\n name: "server",\n dependencies: [\n .package(url: "https://github.com/Kitura/Kitura", .upToNextMinor(from: "3.0.1"))\n ],\n targets: [\n .target(\n name: "server", \n dependencies: ["Kitura"],\n swiftSettings: [\n // Enable better optimizations when building in Release configuration. Despite the use of\n // the `.unsafeFlags` construct required by SwiftPM, this flag is recommended for Release\n // builds. See <https://github.com/swift-server/guides#building-for-production> for details.\n .unsafeFlags(["-cross-module-optimization"], .when(configuration: .release))\n ]\n )\n ]\n)\n | dataset_sample\swift\the-benchmarker_web-frameworks\kitura\Package.swift | Package.swift | Swift | 788 | 0.95 | 0.136364 | 0.2 | react-lib | 901 | 2023-10-25T04:13:01.351346 | MIT | false | c729a16f6465db1c20c590a7bb71f09c |
import Kitura\n\nlet router = Router()\n\nrouter.get("/") { _, res, next in\n try res.send("")\n next()\n}\n\n/**\n In this case, the path /:id(\\d+) specifies that only digits should be matched.\n This path will be matched for /123, but not / or /abc.\n */\nrouter.get("/user/:id(\\d+)") { req, res, next in\n let userId = req.parameters["id"] ?? ""\n try res.send(userId).end()\n next()\n}\n\nrouter.post("/user") { _, res, next in\n try res.send("")\n next()\n}\n\nKitura.addHTTPServer(onPort: 3000, with: router)\nKitura.run()\n | dataset_sample\swift\the-benchmarker_web-frameworks\kitura\Sources\server\main.swift | main.swift | Swift | 523 | 0.95 | 0.153846 | 0.095238 | react-lib | 395 | 2025-05-02T17:22:35.470922 | BSD-3-Clause | false | 956af5d680fd53b9750b58185d0d212b |
// swift-tools-version:5.0\n\nimport PackageDescription\n\nlet package = Package(\n name: "MyServer",\n dependencies: [\n .package(url: "https://github.com/httpswift/swifter.git", .upToNextMinor(from: "1.5.0"))\n ],\n targets: [\n .target(\n name: "server", \n dependencies: ["Swifter"],\n swiftSettings: [\n // Enable better optimizations when building in Release configuration. Despite the use of\n // the `.unsafeFlags` construct required by SwiftPM, this flag is recommended for Release\n // builds. See <https://github.com/swift-server/guides#building-for-production> for details.\n .unsafeFlags(["-cross-module-optimization"], .when(configuration: .release))\n ] \n ),\n ]\n)\n | dataset_sample\swift\the-benchmarker_web-frameworks\swifter-framework\Package.swift | Package.swift | Swift | 816 | 0.95 | 0.136364 | 0.2 | python-kit | 903 | 2024-05-16T21:23:02.914462 | Apache-2.0 | false | 3ce1a5f9b8d97387b0a9120753b8bb8b |
import Swifter\nimport Dispatch\n\nlet server = HttpServer()\n\nserver.GET["/"] = { _ in\n return HttpResponse.ok(.text(""))\n}\n\nserver.GET["/user/:id"] = { request in\n let userId = request.params[":id"] ?? ""\n return HttpResponse.ok(.text("\(userId)"))\n}\n\nserver.POST["/user"] = { _ in\n return HttpResponse.ok(.text(""))\n}\n\nlet semaphore = DispatchSemaphore(value: 0)\ndo {\n try server.start(3000, forceIPv4: true)\n print("Server has started ( port = \(try server.port()) ). Try to connect now...")\n semaphore.wait()\n} catch {\n print("Server start error: \(error)")\n semaphore.signal()\n}\n | dataset_sample\swift\the-benchmarker_web-frameworks\swifter-framework\Sources\server\main.swift | main.swift | Swift | 598 | 0.85 | 0.111111 | 0 | react-lib | 280 | 2023-08-16T21:32:02.803700 | GPL-3.0 | false | 29303302b1318373cbe955d602157538 |
// swift-tools-version:5.2\n// The swift-tools-version declares the minimum version of Swift required to build this package.\n\nimport PackageDescription\n\nlet package = Package(\n name: "server",\n platforms: [\n .macOS("10.15")\n ],\n dependencies: [\n // Dependencies declare other packages that this package depends on.\n .package(url: "https://github.com/vapor/vapor.git", .upToNextMinor(from: "4.114.0"))\n ],\n targets: [\n // Targets are the basic building blocks of a package. A target can define a module or a test suite.\n // Targets can depend on other targets in this package, and on products in packages which this package depends on.\n .target(\n name: "server",\n dependencies: [\n .product(name: "Vapor", package: "vapor")\n ],\n swiftSettings: [\n // Enable better optimizations when building in Release configuration. Despite the use of\n // the `.unsafeFlags` construct required by SwiftPM, this flag is recommended for Release\n // builds. See <https://github.com/swift-server/guides#building-for-production> for details.\n .unsafeFlags(["-cross-module-optimization"], .when(configuration: .release))\n ]),\n \n ]\n)\n | dataset_sample\swift\the-benchmarker_web-frameworks\vapor-framework\Package.swift | Package.swift | Swift | 1,313 | 0.95 | 0.096774 | 0.285714 | vue-tools | 149 | 2024-07-03T20:43:25.995213 | GPL-3.0 | false | 812341a5557e9d4a71c9721476aad1b6 |
import Vapor\n\nvar env = Environment(name: Environment.get("VAPOR_ENV") ?? "development")\ntry LoggingSystem.bootstrap(from: &env)\nlet app = Application()\ndefer { app.shutdown() }\napp.middleware = .init()\n\napp.logger.logLevel = .critical\n\napp.get { _ in\n Response()\n}\n\napp.get("user", ":userID") { req in\n req.parameters.get("userID") ?? ""\n}\n\napp.post("user") { _ in\n Response()\n}\n\napp.post("empty") { _ in\n Response()\n}\n\napp.http.server.configuration.hostname = Environment.get("SERVER_HOSTNAME") ?? "0.0.0.0"\nif let portString = Environment.get("SERVER_PORT"), let port = Int(portString) {\n app.http.server.configuration.port = port\n} else {\n app.http.server.configuration.port = 3000\n}\n\ntry app.run()\n | dataset_sample\swift\the-benchmarker_web-frameworks\vapor-framework\Sources\server\main.swift | main.swift | Swift | 721 | 0.85 | 0.088235 | 0 | vue-tools | 835 | 2025-06-22T03:05:23.012518 | GPL-3.0 | false | 9a4b9f893be741a81ab602bc36e7ff13 |
//\n// AppDelegate.swift\n// DylibExample\n//\n// Created by Nathan Horrigan on 15/08/2021.\n//\n\nimport UIKit\n\n@main\nclass AppDelegate: UIResponder, UIApplicationDelegate {\n\n\n\n func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {\n // Override point for customization after application launch.\n return true\n }\n\n // MARK: UISceneSession Lifecycle\n\n func application(_ application: UIApplication, configurationForConnecting connectingSceneSession: UISceneSession, options: UIScene.ConnectionOptions) -> UISceneConfiguration {\n // Called when a new scene session is being created.\n // Use this method to select a configuration to create the new scene with.\n return UISceneConfiguration(name: "Default Configuration", sessionRole: connectingSceneSession.role)\n }\n\n func application(_ application: UIApplication, didDiscardSceneSessions sceneSessions: Set<UISceneSession>) {\n // Called when the user discards a scene session.\n // If any sessions were discarded while the application was not running, this will be called shortly after application:didFinishLaunchingWithOptions.\n // Use this method to release any resources that were specific to the discarded scenes, as they will not return.\n }\n\n\n}\n\n | dataset_sample\swift\wasmerio_wasmer\tests\integration\ios\DylibExample\DylibExample\AppDelegate.swift | AppDelegate.swift | Swift | 1,357 | 0.95 | 0.083333 | 0.52 | node-utils | 69 | 2023-12-19T14:40:19.515372 | MIT | true | 60567ea09ff40b95868c152e2fd250a6 |
//\n// SceneDelegate.swift\n// DylibExample\n//\n// Created by Nathan Horrigan on 15/08/2021.\n//\n\nimport UIKit\n\nclass SceneDelegate: UIResponder, UIWindowSceneDelegate {\n\n var window: UIWindow?\n\n\n func scene(_ scene: UIScene, willConnectTo session: UISceneSession, options connectionOptions: UIScene.ConnectionOptions) {\n // Use this method to optionally configure and attach the UIWindow `window` to the provided UIWindowScene `scene`.\n // If using a storyboard, the `window` property will automatically be initialized and attached to the scene.\n // This delegate does not imply the connecting scene or session are new (see `application:configurationForConnectingSceneSession` instead).\n guard let _ = (scene as? UIWindowScene) else { return }\n }\n\n func sceneDidDisconnect(_ scene: UIScene) {\n // Called as the scene is being released by the system.\n // This occurs shortly after the scene enters the background, or when its session is discarded.\n // Release any resources associated with this scene that can be re-created the next time the scene connects.\n // The scene may re-connect later, as its session was not necessarily discarded (see `application:didDiscardSceneSessions` instead).\n }\n\n func sceneDidBecomeActive(_ scene: UIScene) {\n // Called when the scene has moved from an inactive state to an active state.\n // Use this method to restart any tasks that were paused (or not yet started) when the scene was inactive.\n }\n\n func sceneWillResignActive(_ scene: UIScene) {\n // Called when the scene will move from an active state to an inactive state.\n // This may occur due to temporary interruptions (ex. 
an incoming phone call).\n }\n\n func sceneWillEnterForeground(_ scene: UIScene) {\n // Called as the scene transitions from the background to the foreground.\n // Use this method to undo the changes made on entering the background.\n }\n\n func sceneDidEnterBackground(_ scene: UIScene) {\n // Called as the scene transitions from the foreground to the background.\n // Use this method to save data, release shared resources, and store enough scene-specific state information\n // to restore the scene back to its current state.\n }\n\n\n}\n\n | dataset_sample\swift\wasmerio_wasmer\tests\integration\ios\DylibExample\DylibExample\SceneDelegate.swift | SceneDelegate.swift | Swift | 2,300 | 0.95 | 0.019231 | 0.564103 | vue-tools | 954 | 2024-07-08T02:02:09.899359 | Apache-2.0 | true | 21ab9933c1d9a87a7ca8b5c14c6e5d26 |
//\n// ViewController.swift\n// DylibExample\n//\n// Created by Nathan Horrigan on 15/08/2021.\n//\n\nimport UIKit\n\nclass ViewController: UIViewController {\n @IBOutlet weak var label: UILabel!\n \n override func viewDidLoad() {\n super.viewDidLoad()\n let sum = calculate_sum(1, 3)\n label.text = "The sum of 1 + 3 = \(sum)"\n }\n}\n | dataset_sample\swift\wasmerio_wasmer\tests\integration\ios\DylibExample\DylibExample\ViewController.swift | ViewController.swift | Swift | 354 | 0.95 | 0.055556 | 0.4 | vue-tools | 33 | 2025-03-28T03:35:21.395677 | Apache-2.0 | true | cc66953888c9cf05b80ef5ee41e16463 |
// swift-tools-version:5.3\nimport PackageDescription\n\nlet package = Package(\n name: "TreeSitterCmake",\n platforms: [.macOS(.v10_13), .iOS(.v11)],\n products: [\n .library(name: "TreeSitterCmake", targets: ["TreeSitterCmake"]),\n ],\n dependencies: [],\n targets: [\n .target(name: "TreeSitterCmake",\n path: ".",\n exclude: [\n "Cargo.toml",\n "Makefile",\n "binding.gyp",\n "bindings/c",\n "bindings/go",\n "bindings/node",\n "bindings/python",\n "bindings/rust",\n "prebuilds",\n "grammar.js",\n "package.json",\n "package-lock.json",\n "pyproject.toml",\n "setup.py",\n "test",\n "examples",\n ".editorconfig",\n ".github",\n ".gitignore",\n ".gitattributes",\n ".gitmodules",\n ],\n sources: [\n "src/parser.c",\n // NOTE: if your language has an external scanner, add it here.\n ],\n resources: [\n .copy("queries")\n ],\n publicHeadersPath: "bindings/swift",\n cSettings: [.headerSearchPath("src")])\n ],\n cLanguageStandard: .c11\n)\n | dataset_sample\swift\Wilfred_difftastic\vendored_parsers\tree-sitter-cmake\Package.swift | Package.swift | Swift | 1,527 | 0.95 | 0.020833 | 0.042553 | node-utils | 366 | 2024-10-29T02:24:28.021204 | BSD-3-Clause | false | abd5eeb4f030109b4d1231a505b3b00b |
// swift-tools-version:5.3\n\nimport PackageDescription\n\nlet package = Package(\n name: "TreeSitterElm",\n platforms: [.macOS(.v10_13), .iOS(.v11)],\n products: [\n .library(name: "TreeSitterElm", targets: ["TreeSitterElm"]),\n ],\n dependencies: [],\n targets: [\n .target(name: "TreeSitterElm",\n path: ".",\n exclude: [\n "binding.gyp",\n "bindings",\n "Cargo.toml",\n "docs",\n "examples",\n "grammar.js",\n "HOW_TO_RELEASE.md",\n "index.d.ts",\n "LICENSE.md",\n "package.json",\n "README.md",\n "script",\n "src/grammar.json",\n "src/node-types.json",\n "test",\n "tsconfig.json",\n ],\n sources: [\n "src/parser.c",\n "src/scanner.c",\n ],\n resources: [\n .copy("queries")\n ],\n publicHeadersPath: "bindings/swift",\n cSettings: [.headerSearchPath("src")])\n ]\n)\n | dataset_sample\swift\Wilfred_difftastic\vendored_parsers\tree-sitter-elm\Package.swift | Package.swift | Swift | 1,263 | 0.95 | 0 | 0.02439 | react-lib | 38 | 2023-11-22T03:26:01.858853 | Apache-2.0 | false | 762db482d073a011cc40cf88367d66df |
// swift-tools-version:5.3\nimport PackageDescription\n\nlet package = Package(\n name: "TreeSitterLatex",\n platforms: [.macOS(.v10_13), .iOS(.v11)],\n products: [\n .library(name: "TreeSitterLatex", targets: ["TreeSitterLatex"]),\n ],\n dependencies: [],\n targets: [\n .target(name: "TreeSitterLatex",\n path: ".",\n exclude: [\n "Cargo.toml",\n "Makefile",\n "binding.gyp",\n "bindings/c",\n "bindings/go",\n "bindings/node",\n "bindings/python",\n "bindings/rust",\n "prebuilds",\n "grammar.js",\n "package.json",\n "package-lock.json",\n "pyproject.toml",\n "setup.py",\n "test",\n "examples",\n ".editorconfig",\n ".github",\n ".gitignore",\n ".gitattributes",\n ".gitmodules",\n ],\n sources: [\n "src/parser.c",\n "src/scanner.c",\n ],\n resources: [\n .copy("queries")\n ],\n publicHeadersPath: "bindings/swift",\n cSettings: [.headerSearchPath("src")])\n ],\n cLanguageStandard: .c11\n)\n | dataset_sample\swift\Wilfred_difftastic\vendored_parsers\tree-sitter-latex\Package.swift | Package.swift | Swift | 1,480 | 0.95 | 0 | 0.021277 | node-utils | 992 | 2024-12-24T07:22:10.682461 | Apache-2.0 | false | b7431cbdb2f44968ef948c9617988692 |
// swift-tools-version:5.3\n\nimport PackageDescription\n\nlet package = Package(\n name: "TreeSitterPerl",\n platforms: [.macOS(.v10_13), .iOS(.v11)],\n products: [\n .library(name: "TreeSitterPerl", targets: ["TreeSitterPerl"]),\n ],\n dependencies: [],\n targets: [\n .target(name: "TreeSitterPerl",\n path: ".",\n exclude: [\n "binding.gyp",\n "bindings",\n "Cargo.toml",\n "examples",\n "grammar.js",\n "LICENSE",\n "package-lock.json",\n "package.json",\n "README.md",\n "src/grammar.json",\n "src/node-types.json",\n ],\n sources: [\n "src/parser.c",\n "src/scanner.c",\n ],\n publicHeadersPath: "bindings/swift",\n cSettings: [.headerSearchPath("src")])\n ]\n)\n | dataset_sample\swift\Wilfred_difftastic\vendored_parsers\tree-sitter-perl\Package.swift | Package.swift | Swift | 1,022 | 0.95 | 0 | 0.030303 | awesome-app | 960 | 2024-12-08T01:26:26.542928 | MIT | false | 1ec84b02dbf5ef69a480788cbd072ec4 |
// swift-tools-version:5.3\nimport PackageDescription\n\nlet package = Package(\n name: "TreeSitterSmali",\n platforms: [.macOS(.v10_13), .iOS(.v11)],\n products: [\n .library(name: "TreeSitterSmali", targets: ["TreeSitterSmali"]),\n ],\n dependencies: [],\n targets: [\n .target(name: "TreeSitterSmali",\n path: ".",\n exclude: [\n "binding.gyp",\n "bindings",\n "Cargo.toml",\n "test",\n "grammar.js",\n "LICENSE",\n "package.json",\n "README.md",\n "script",\n "src/grammar.json",\n "src/node-types.json",\n ],\n sources: [\n "src/parser.c",\n "src/scanner.c",\n ],\n resources: [\n .copy("queries")\n ],\n publicHeadersPath: "bindings/swift",\n cSettings: [.headerSearchPath("src")])\n ]\n)\n | dataset_sample\swift\Wilfred_difftastic\vendored_parsers\tree-sitter-smali\Package.swift | Package.swift | Swift | 1,095 | 0.95 | 0 | 0.027778 | node-utils | 248 | 2023-09-07T07:37:46.116540 | BSD-3-Clause | false | 0c14f54af4706c39e25eeefa708ba4cd |
//\n// Generated file. Do not edit.\n//\n\nimport FlutterMacOS\nimport Foundation\n\nimport desktop_drop\nimport device_info_plus\nimport hotkey_manager_macos\nimport macos_window_utils\nimport path_provider_foundation\nimport protocol_handler_macos\nimport syncfusion_pdfviewer_macos\nimport url_launcher_macos\n\nfunc RegisterGeneratedPlugins(registry: FlutterPluginRegistry) {\n DesktopDropPlugin.register(with: registry.registrar(forPlugin: "DesktopDropPlugin"))\n DeviceInfoPlusMacosPlugin.register(with: registry.registrar(forPlugin: "DeviceInfoPlusMacosPlugin"))\n HotkeyManagerMacosPlugin.register(with: registry.registrar(forPlugin: "HotkeyManagerMacosPlugin"))\n MacOSWindowUtilsPlugin.register(with: registry.registrar(forPlugin: "MacOSWindowUtilsPlugin"))\n PathProviderPlugin.register(with: registry.registrar(forPlugin: "PathProviderPlugin"))\n ProtocolHandlerMacosPlugin.register(with: registry.registrar(forPlugin: "ProtocolHandlerMacosPlugin"))\n SyncfusionFlutterPdfViewerPlugin.register(with: registry.registrar(forPlugin: "SyncfusionFlutterPdfViewerPlugin"))\n UrlLauncherPlugin.register(with: registry.registrar(forPlugin: "UrlLauncherPlugin"))\n}\n | dataset_sample\swift\Wox-launcher_Wox\wox.ui.flutter\wox\macos\Flutter\GeneratedPluginRegistrant.swift | GeneratedPluginRegistrant.swift | Swift | 1,153 | 0.95 | 0 | 0.130435 | node-utils | 786 | 2025-04-07T06:01:56.879678 | MIT | false | 5523ea73c21a847b17265901d54dba11 |
import Cocoa\nimport FlutterMacOS\n\n@main\nclass AppDelegate: FlutterAppDelegate {\n // Store the previous active application\n private var previousActiveApp: NSRunningApplication?\n // Flutter method channel for window events\n private var windowEventChannel: FlutterMethodChannel?\n \n private func log(_ message: String) {\n // NSLog("WoxApp: \(message)")\n }\n \n override func applicationShouldTerminateAfterLastWindowClosed(_ sender: NSApplication) -> Bool {\n return false\n }\n\n override func applicationSupportsSecureRestorableState(_ app: NSApplication) -> Bool {\n return true\n }\n \n /// Apply acrylic effect to window\n private func applyAcrylicEffect(to window: NSWindow) {\n // Ensure light theme is used, otherwise the dark theme effect the theme color\n window.appearance = NSAppearance(named: .aqua)\n \n if let contentView = window.contentView {\n let effectView = NSVisualEffectView(frame: contentView.bounds)\n effectView.material = .popover\n effectView.state = .active\n effectView.blendingMode = .behindWindow\n // Ensure the effect view resizes with the window\n effectView.autoresizingMask = [.width, .height]\n contentView.addSubview(effectView, positioned: .below, relativeTo: nil)\n \n // Try to make all Flutter-related views transparent\n for subview in contentView.subviews where !(subview is NSVisualEffectView) {\n subview.wantsLayer = true\n subview.layer?.backgroundColor = NSColor.clear.cgColor\n }\n }\n }\n \n // Setup notification for window blur event\n private func setupWindowBlurNotification() {\n guard let window = self.mainFlutterWindow else { return }\n \n NotificationCenter.default.addObserver(\n self,\n selector: #selector(windowDidResignKey),\n name: NSWindow.didResignKeyNotification,\n object: window\n )\n }\n \n // Handle window loss of focus\n @objc private func windowDidResignKey(_ notification: Notification) {\n log("Window did resign key (blur)")\n // Notify Flutter about the window blur event\n DispatchQueue.main.async {\n 
self.windowEventChannel?.invokeMethod("onWindowBlur", arguments: nil)\n }\n }\n \n override func applicationDidFinishLaunching(_ notification: Notification) {\n let controller = self.mainFlutterWindow?.contentViewController as! FlutterViewController\n \n // Try to make Flutter view background transparent\n let flutterView = controller.view\n flutterView.wantsLayer = true\n flutterView.layer?.backgroundColor = NSColor.clear.cgColor\n \n let channel = FlutterMethodChannel(\n name: "com.wox.macos_window_manager",\n binaryMessenger: controller.engine.binaryMessenger)\n \n // Store window event channel for use in window events\n self.windowEventChannel = channel\n \n // Setup window blur notification\n setupWindowBlurNotification()\n \n channel.setMethodCallHandler { [weak self] (call, result) in\n guard let window = self?.mainFlutterWindow else {\n result(FlutterError(code: "NO_WINDOW", message: "No window found", details: nil))\n return\n }\n \n DispatchQueue.main.async {\n switch call.method {\n case "setSize":\n if let args = call.arguments as? [String: Any],\n let width = args["width"] as? Double,\n let height = args["height"] as? Double {\n let size = NSSize(width: width, height: height)\n window.setContentSize(size)\n result(nil)\n } else {\n result(FlutterError(code: "INVALID_ARGS", message: "Invalid arguments for setSize", details: nil))\n }\n \n case "getPosition":\n let frame = window.frame\n let screenFrame = window.screen?.frame ?? NSScreen.main?.frame ?? NSRect.zero\n // Convert to bottom-left origin coordinate system\n let x = frame.origin.x\n let y = screenFrame.height - frame.origin.y - frame.height\n result(["x": x, "y": y])\n \n case "setPosition":\n if let args = call.arguments as? [String: Any],\n let x = args["x"] as? Double,\n let y = args["y"] as? Double {\n let screenFrame = window.screen?.frame ?? NSScreen.main?.frame ?? 
NSRect.zero\n // Convert from bottom-left to top-left origin coordinate system\n let flippedY = screenFrame.height - y - window.frame.height\n window.setFrameOrigin(NSPoint(x: x, y: flippedY))\n result(nil)\n } else {\n result(FlutterError(code: "INVALID_ARGS", message: "Invalid arguments for setPosition", details: nil))\n }\n \n case "center":\n let screenFrame = window.screen?.frame ?? NSScreen.main?.frame ?? NSRect.zero\n var windowWidth: CGFloat = window.frame.width\n var windowHeight: CGFloat = window.frame.height\n if let args = call.arguments as? [String: Any] {\n if let width = args["width"] as? Double {\n windowWidth = CGFloat(width)\n }\n if let height = args["height"] as? Double {\n windowHeight = CGFloat(height)\n }\n }\n \n let x = (screenFrame.width - windowWidth) / 2 + screenFrame.minX\n let y = (screenFrame.height - windowHeight) / 2 + screenFrame.minY\n \n let newFrame = NSRect(x: x, y: y, width: windowWidth, height: windowHeight)\n window.setFrame(newFrame, display: true)\n result(nil)\n \n case "show":\n self?.log("Showing Wox window")\n // Save the current frontmost application before activating Wox\n if let frontApp = NSWorkspace.shared.frontmostApplication, frontApp != NSRunningApplication.current {\n self?.log("Saving previous active app: \(frontApp.localizedName ?? "Unknown") (bundleID: \(frontApp.bundleIdentifier ?? "Unknown"))")\n self?.previousActiveApp = frontApp\n } else {\n self?.log("No suitable previous app to save")\n }\n\n window.makeKeyAndOrderFront(nil)\n NSApp.activate(ignoringOtherApps: true)\n result(nil)\n \n case "hide":\n self?.log("Hiding Wox window")\n window.orderOut(nil)\n // Activate the previous active application after hiding Wox\n if let prevApp = self?.previousActiveApp {\n self?.log("Activating previous app: \(prevApp.localizedName ?? "Unknown") (bundleID: \(prevApp.bundleIdentifier ?? 
"Unknown"))")\n prevApp.activate(options: .activateIgnoringOtherApps)\n } else {\n self?.log("No previous app saved, looking for any other app to activate")\n }\n result(nil)\n \n case "focus":\n window.makeKeyAndOrderFront(nil)\n NSApp.activate(ignoringOtherApps: true)\n result(nil)\n \n case "isVisible":\n result(window.isVisible)\n \n case "setAlwaysOnTop":\n if let alwaysOnTop = call.arguments as? Bool {\n if alwaysOnTop {\n window.level = .popUpMenu\n } else {\n window.level = .normal\n }\n \n result(nil)\n } else {\n result(FlutterError(code: "INVALID_ARGS", message: "Invalid arguments for setAlwaysOnTop", details: nil))\n }\n \n case "startDragging":\n if let currentEvent = window.currentEvent {\n self?.log("Performing drag with event: \(currentEvent)")\n window.performDrag(with: currentEvent)\n } \n result(nil)\n \n case "waitUntilReadyToShow":\n // Force appearance to light mode, otherwise borderless window will have a dark border line\n NSApp.appearance = NSAppearance(named: .aqua)\n\n window.level = .popUpMenu\n window.titlebarAppearsTransparent = true\n window.styleMask.insert(.fullSizeContentView)\n window.styleMask.insert(.nonactivatingPanel)\n window.styleMask.remove(.resizable)\n\n // Hide windows buttons\n window.titleVisibility = .hidden\n window.standardWindowButton(.closeButton)?.isHidden = true\n window.standardWindowButton(.miniaturizeButton)?.isHidden = true\n window.standardWindowButton(.zoomButton)?.isHidden = true\n\n // Make window can join all spaces\n window.collectionBehavior.insert(.canJoinAllSpaces)\n window.collectionBehavior.insert(.fullScreenAuxiliary)\n window.styleMask.insert(.nonactivatingPanel)\n self?.applyAcrylicEffect(to: window)\n \n if let mainWindow = window as? 
MainFlutterWindow {\n mainWindow.isReadyToShow = true\n }\n\n result(nil)\n default:\n result(FlutterMethodNotImplemented)\n }\n }\n }\n \n super.applicationDidFinishLaunching(notification)\n }\n} | dataset_sample\swift\Wox-launcher_Wox\wox.ui.flutter\wox\macos\Runner\AppDelegate.swift | AppDelegate.swift | Swift | 9,007 | 0.95 | 0.094828 | 0.102041 | vue-tools | 925 | 2025-02-19T04:06:43.580056 | MIT | false | b1826526e50333a57f2ba1dc6a55470e |
import Cocoa\nimport FlutterMacOS\n\nclass MainFlutterWindow: NSPanel {\n var isReadyToShow: Bool = false\n \n override func awakeFromNib() {\n let flutterViewController = FlutterViewController()\n let windowFrame = self.frame\n self.contentViewController = flutterViewController\n self.setFrame(windowFrame, display: false)\n\n RegisterGeneratedPlugins(registry: flutterViewController)\n\n super.awakeFromNib()\n }\n\n override public func order(_ place: NSWindow.OrderingMode, relativeTo otherWin: Int) {\n super.order(place, relativeTo: otherWin)\n \n if !isReadyToShow {\n setIsVisible(false)\n }\n }\n} | dataset_sample\swift\Wox-launcher_Wox\wox.ui.flutter\wox\macos\Runner\MainFlutterWindow.swift | MainFlutterWindow.swift | Swift | 624 | 0.85 | 0.083333 | 0 | python-kit | 605 | 2024-06-28T20:21:12.461154 | BSD-3-Clause | false | 7c353baf1b192d5af95596b32a254d9c |
name: 'Bug Report'\ndescription: 'Report an Bug'\ntitle: '[Bug] '\nassignees: wanghe-fit2cloud\nbody:\n - type: markdown\n attributes:\n value: "## Contact Information"\n - type: input\n validations:\n required: false\n attributes:\n label: "Contact Information"\n description: "The ways to quickly contact you: WeChat group number and nickname, email, etc."\n - type: markdown\n attributes:\n value: "## Environment Information"\n - type: input\n validations:\n required: true\n attributes:\n label: "1Panel Version"\n description: "Log in to the 1Panel Web console and check the current version at the bottom right of the page."\n - type: markdown\n attributes:\n value: "## Detailed information"\n - type: textarea\n attributes:\n label: "Problem Description"\n description: "Briefly describe the issue you’ve encountered."\n validations:\n required: true\n - type: textarea\n attributes:\n label: "Steps to Reproduce"\n description: "How can this issue be reproduced."\n validations:\n required: true\n - type: textarea\n attributes:\n label: "The expected correct result"\n - type: textarea\n attributes:\n label: "Related log output"\n description: "Please paste any relevant log output here. It will automatically be formatted as code, so no backticks are necessary."\n render: shell\n - type: textarea\n attributes:\n label: "Additional Information"\n description: "If you have any additional information to provide, you can include it here (screenshots, videos, etc., are welcome)."\n | dataset_sample\yaml\1Panel-dev_1Panel\.github\ISSUE_TEMPLATE\1_bug_report.yml | 1_bug_report.yml | YAML | 1,602 | 0.95 | 0 | 0 | python-kit | 184 | 2024-02-09T23:29:22.350345 | GPL-3.0 | false | fa41ca8a70e7650f76d537d8a1f11130 |
name: 'Feature Request'\ndescription: 'Suggest an idea'\ntitle: '[Feature] '\nassignees: wanghe-fit2cloud\nbody:\n - type: markdown\n attributes:\n value: "## Environment Information"\n - type: input\n validations:\n required: true\n attributes:\n label: "1Panel Version"\n description: "Log in to the 1Panel Web console and check the current version at the bottom right of the page."\n - type: markdown\n attributes:\n value: "## Detailed information"\n - type: textarea\n attributes:\n label: "Please describe your needs or suggestions for improvements"\n validations:\n required: true\n - type: textarea\n attributes:\n label: "Please describe the solution you suggest"\n - type: textarea\n attributes:\n label: "Additional Information"\n description: "If you have any additional information to provide, you can include it here (screenshots, videos, etc., are welcome)." | dataset_sample\yaml\1Panel-dev_1Panel\.github\ISSUE_TEMPLATE\2_feature_request.yml | 2_feature_request.yml | YAML | 925 | 0.95 | 0.035714 | 0 | python-kit | 110 | 2024-05-10T00:14:24.796151 | GPL-3.0 | false | 1cdcbdea9efa0a57076950312f1ad2ad |
blank_issues_enabled: false\ncontact_links:\n - name: Questions & Discussions\n url: https://github.com/1Panel-dev/1Panel/discussions\n about: Raise questions about the installation, deployment, use and other aspects of the project.\n | dataset_sample\yaml\1Panel-dev_1Panel\.github\ISSUE_TEMPLATE\config.yml | config.yml | YAML | 236 | 0.8 | 0 | 0 | vue-tools | 559 | 2023-08-08T18:29:10.974856 | BSD-3-Clause | false | 368e7829051db83e00ce2d48fe32294b |
name: General PR Handling for 1Panel\non: pull_request\npermissions:\n pull-requests: write\njobs:\n generic_handler:\n name: Add Labels to PR\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v3\n - uses: actions-ecosystem/action-add-labels@v1\n with:\n github_token: ${{ secrets.GITHUBTOKEN }}\n labels: ${{ github.base_ref }}\n | dataset_sample\yaml\1Panel-dev_1Panel\.github\workflows\add-labels-for-pr.yml | add-labels-for-pr.yml | YAML | 373 | 0.7 | 0.071429 | 0 | vue-tools | 19 | 2025-06-20T21:15:35.937987 | BSD-3-Clause | false | 1d0b355a24172d7f612788103a84bae8 |
name: Create Release And Upload assets\non:\n push:\n tags:\n - 'v*'\njobs:\n create-release:\n runs-on: ubuntu-latest\n steps:\n - name: Checkout Code\n uses: actions/checkout@v4\n - name: Setup Node\n uses: actions/setup-node@v4\n with:\n node-version: '20.2'\n - name: Build Web\n run: |\n cd frontend && npm install && npm run build:pro\n env:\n NODE_OPTIONS: --max-old-space-size=8192\n - name: Setup Go\n uses: actions/setup-go@v5\n with:\n go-version: '1.23'\n - name: Build Release\n uses: goreleaser/goreleaser-action@v6\n with:\n distribution: goreleaser\n version: '~> v2'\n args: release --skip=publish --clean\n - name: Upload Assets\n uses: softprops/action-gh-release@v1\n if: startsWith(github.ref, 'refs/tags/')\n with:\n draft: true\n files: |\n dist/*.tar.gz\n dist/checksums.txt\n - name: Setup OSSUTIL\n uses: yizhoumo/setup-ossutil@v2\n with:\n endpoint: ${{ secrets.OSS_ENDPOINT }}\n access-key-id: ${{ secrets.OSS_ACCESS_KEY_ID }}\n access-key-secret: ${{ secrets.OSS_ACCESS_KEY_SECRET }}\n ossutil-version: '1.7.18'\n - name: Upload Assets to OSS\n run: ossutil cp -r dist/ oss://resource-fit2cloud-com/1panel/package/stable/${{ github.ref_name }}/release/ --include "*.tar.gz" --include "checksums.txt" --only-current-dir --force\n | dataset_sample\yaml\1Panel-dev_1Panel\.github\workflows\build-publish-to-oss.yml | build-publish-to-oss.yml | YAML | 1,510 | 0.8 | 0.021277 | 0 | vue-tools | 814 | 2023-11-14T11:26:29.420717 | GPL-3.0 | false | 311f7918c261caa8d3798fb5a815e7f8 |
name: Create Release And Upload Cloudflare R2\non:\n push:\n tags:\n - 'v*'\njobs:\n create-release:\n runs-on: ubuntu-latest\n steps:\n - name: Checkout Code\n uses: actions/checkout@v4\n - name: Setup Node\n uses: actions/setup-node@v4\n with:\n node-version: '20.2'\n - name: Build Web\n run: |\n cd frontend && npm install && npm run build:pro\n env:\n NODE_OPTIONS: --max-old-space-size=8192\n - name: Setup Go\n uses: actions/setup-go@v5\n with:\n go-version: '1.23'\n - name: Build Release\n uses: goreleaser/goreleaser-action@v6\n with:\n distribution: goreleaser\n version: '~> v2'\n args: release --skip=publish --clean\n - name: Upload Assets\n uses: softprops/action-gh-release@v1\n if: startsWith(github.ref, 'refs/tags/')\n with:\n draft: true\n files: |\n dist/*.tar.gz\n dist/checksums.txt\n - name: Setup Rclone\n uses: AnimMouse/setup-rclone@v1\n with:\n rclone_config: ${{ secrets.RCLONE_CONFIG }}\n - name: Upload to Cloudflare R2\n run: |\n rclone copy dist/ cloudflare_r2:package/stable/${{ github.ref_name }}/release/ --include "*.tar.gz" --include "checksums.txt" --progress\n | dataset_sample\yaml\1Panel-dev_1Panel\.github\workflows\build-publish-to-r2.yml | build-publish-to-r2.yml | YAML | 1,337 | 0.8 | 0.022222 | 0 | react-lib | 198 | 2024-10-02T07:45:22.241575 | MIT | false | 82b393d31f39905b42e593d0e6cbbb03 |
name: Issue Translator\non: \n issue_comment: \n types: [created]\n issues: \n types: [opened]\njobs:\n build:\n runs-on: ubuntu-latest\n steps:\n - uses: usthe/issues-translate-action@v2.7\n with:\n IS_MODIFY_TITLE: true\n BOT_GITHUB_TOKEN: ${{ secrets.ISSUE_TRANSLATOR_TOKEN }} \n | dataset_sample\yaml\1Panel-dev_1Panel\.github\workflows\issue-translator.yml | issue-translator.yml | YAML | 313 | 0.7 | 0 | 0 | node-utils | 58 | 2024-05-25T09:57:16.003491 | Apache-2.0 | false | ab245600862f3878a65ba2a209b2d64c |
name: LLM Code Review\npermissions:\n contents: read\n pull-requests: write\non:\n pull_request:\n types: [opened, reopened, synchronize]\njobs:\n llm-code-review:\n runs-on: ubuntu-latest\n steps:\n - uses: fit2cloud/LLM-CodeReview-Action@main\n env:\n GITHUB_TOKEN: ${{ secrets.FIT2CLOUDRD_LLM_CODE_REVIEW_TOKEN }}\n OPENAI_API_KEY: ${{ secrets.ALIYUN_LLM_API_KEY }}\n LANGUAGE: English\n OPENAI_API_ENDPOINT: https://dashscope.aliyuncs.com/compatible-mode/v1\n MODEL: qwen2.5-coder-3b-instruct\n PROMPT: "Please check the following code differences for any irregularities, potential issues, or optimization suggestions, and provide your answers in English."\n top_p: 1\n temperature: 1\n # max_tokens: 10000\n MAX_PATCH_LENGTH: 10000 \n IGNORE_PATTERNS: "/node_modules,*.md,/dist,/.github"\n FILE_PATTERNS: "*.java,*.go,*.py,*.vue,*.ts,*.js,*.css,*.scss,*.html"\n | dataset_sample\yaml\1Panel-dev_1Panel\.github\workflows\llm-code-review.yml | llm-code-review.yml | YAML | 974 | 0.8 | 0.04 | 0.04 | vue-tools | 678 | 2025-03-02T16:40:54.736940 | GPL-3.0 | false | 3ec73085127ec012a8df280ac341a1ee |
name: SonarCloud Scan\non:\n push:\n branches:\n - dev\n pull_request:\n types: [opened, synchronize, reopened]\njobs:\n sonarcloud:\n name: SonarCloud\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v3\n with:\n fetch-depth: 0\n - name: SonarCloud Scan\n uses: SonarSource/sonarcloud-github-action@master\n env:\n GITHUB_TOKEN: ${{ secrets.GITHUBTOKEN }}\n SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}\n | dataset_sample\yaml\1Panel-dev_1Panel\.github\workflows\sonarcloud-scan.yml | sonarcloud-scan.yml | YAML | 473 | 0.7 | 0 | 0 | vue-tools | 581 | 2024-10-03T05:30:21.906870 | Apache-2.0 | false | 67a56b5664bc720b8fcc797953ab672c |
name: Synchronize to Gitee\non: [push]\njobs:\n repo-sync:\n runs-on: ubuntu-latest\n steps:\n - name: Mirror the Github organization repos to Gitee.\n uses: Yikun/hub-mirror-action@master\n with:\n src: 'github/1Panel-dev'\n dst: 'gitee/fit2cloud-feizhiyun'\n dst_key: ${{ secrets.GITEE_PRIVATE_KEY }}\n dst_token: ${{ secrets.GITEE_TOKEN }}\n static_list: "1Panel"\n force_update: true\n | dataset_sample\yaml\1Panel-dev_1Panel\.github\workflows\sync2gitee.yml | sync2gitee.yml | YAML | 455 | 0.7 | 0 | 0 | node-utils | 907 | 2024-05-31T15:28:37.158502 | BSD-3-Clause | false | faec7d151cd177e9215779ea3bc49a16 |
name: Typos Check\non: pull_request\njobs:\n run:\n name: Spell Check with Typos\n runs-on: ubuntu-latest\n steps:\n - name: Checkout Actions Repository\n uses: actions/checkout@v2\n - name: Check spelling\n uses: crate-ci/typos@master\n | dataset_sample\yaml\1Panel-dev_1Panel\.github\workflows\tyops-check.yml | tyops-check.yml | YAML | 252 | 0.7 | 0 | 0 | awesome-app | 219 | 2025-05-27T20:04:22.126735 | Apache-2.0 | false | 775de25bee9ea28e02cd9edd07bbee52 |
ErrInvalidParams: "Ralat parameter permintaan: {{ .detail }}"\nErrTokenParse: "Ralat penjanaan token: {{ .detail }}"\nErrInitialPassword: "Ralat kata laluan awal"\nErrInternalServer: "Ralat dalaman perkhidmatan: {{ .detail }}"\nErrRecordExist: "Rekod sudah wujud"\nErrRecordNotFound: "Rekod tidak dijumpai"\nErrStructTransform: "Kegagalan penukaran jenis: {{ .detail }}"\nErrNotLogin: "Pengguna belum log masuk: {{ .detail }}"\nErrPasswordExpired: "Kata laluan semasa telah tamat tempoh: {{ .detail }}"\nErrNotSupportType: "Sistem tidak menyokong jenis semasa: {{ .detail }}"\nErrApiConfigStatusInvalid: "Akses antara muka API dilarang: {{ .detail }}"\nErrApiConfigKeyInvalid: "Ralat kunci antara muka API: {{ .detail }}"\nErrApiConfigIPInvalid: "IP antara muka API tidak berada dalam senarai putih: {{ .detail }}"\nErrApiConfigDisable: "Antara muka ini melarang penggunaan panggilan API: {{ .detail }}"\nErrApiConfigKeyTimeInvalid: "Ralat cap waktu antara muka API: {{ .detail }}"\n\n#common\nErrNameIsExist: "Nama sudah wujud"\nErrDemoEnvironment: "Pelayan demo, operasi ini dilarang!"\nErrCmdTimeout: "Pelaksanaan arahan telah tamat masa!"\nErrCmdIllegal: "Arahan mengandungi aksara tidak sah. 
Sila ubah dan cuba lagi!"\nErrPortExist: 'Port {{ .port }} sudah digunakan oleh {{ .type }} [{{ .name }}]'\nTYPE_APP: "Aplikasi"\nTYPE_RUNTIME: "Persekitaran runtime"\nTYPE_DOMAIN: "Nama domain"\nErrTypePort: 'Format port {{ .name }} adalah salah'\nErrTypePortRange: 'Julat port perlu berada di antara 1-65535'\nSuccess: "Berjaya"\nFailed: "Gagal"\nSystemRestart: "Mulakan semula sistem menyebabkan gangguan tugas"\n\n#app\nErrPortInUsed: "Port {{ .detail }} sudah digunakan"\nErrAppLimit: "Aplikasi melebihi had pemasangan"\nErrAppRequired: "Aplikasi {{ .detail }} diperlukan"\nErrNotInstall: "Aplikasi tidak dipasang"\nErrPortInOtherApp: "Port {{ .port }} sudah digunakan oleh aplikasi {{ .apps }}"\nErrDbUserNotValid: "Pangkalan data stok, nama pengguna dan kata laluan tidak sepadan!"\nErrDockerComposeNotValid: "Format fail docker-compose adalah salah!"\nErrUpdateBuWebsite: "Aplikasi berjaya dikemas kini, tetapi pengubahsuaian fail konfigurasi laman web gagal, sila semak konfigurasi!"\nErr1PanelNetworkFailed: "Penciptaan rangkaian kontena lalai gagal! {{ .detail }}"\nErrFileParse: "Analisis fail docker-compose aplikasi gagal!"\nErrInstallDirNotFound: "Direktori pemasangan tidak wujud"\nAppStoreIsUpToDate: "Terkini"\nLocalAppVersionNull: "Aplikasi {{.name}} tidak diselaraskan ke versi! Tidak dapat menambah ke senarai aplikasi"\nLocalAppVersionErr: "{{.name}} gagal diselaraskan ke versi {{.version}}! {{.err}}"\nErrFileNotFound: "Fail {{.name}} tidak wujud"\nErrFileParseApp: "Gagal menganalisis fail {{.name}} {{.err}}"\nErrAppDirNull: "Folder versi tidak wujud"\nLocalAppErr: "Penyelarasan aplikasi {{.name}} gagal! 
{{.err}}"\nErrContainerName: "Nama Kontena sudah wujud"\nErrAppSystemRestart: "Pemulangan semula 1Panel menyebabkan tugas terhenti"\nErrCreateHttpClient: "Gagal mencipta permintaan HTTP {{.err}}"\nErrHttpReqTimeOut: "Permintaan tamat masa {{.err}}"\nErrHttpReqFailed: "Permintaan gagal {{.err}}"\nErrHttpReqNotFound: "Fail tidak wujud"\nErrNoSuchHost: "Sambungan rangkaian gagal"\nErrImagePullTimeOut: "Tarik imej tamat masa"\nErrContainerNotFound: "Kontena {{ .name }} tidak wujud"\nErrContainerMsg: "Kontena {{ .name }} bermasalah, sila semak log pada halaman kontena untuk maklumat lanjut"\nErrAppBackup: "Sandaran aplikasi {{ .name }} gagal err {{.err}}"\nErrImagePull: "Tarik imej {{ .name }} gagal err {{.err}}"\nErrVersionTooLow: "Versi 1Panel semasa terlalu rendah untuk mengemas kini gedung aplikasi, sila tingkatkan versi"\nErrAppNameExist: "Nama aplikasi sudah wujud"\nAppStoreIsSyncing: "Gedung Aplikasi sedang menyelaraskan, sila cuba lagi nanti"\nErrGetCompose: "Gagal mendapatkan fail docker-compose.yml! 
{{ .detail }}"\nErrAppWarn: "Status tidak normal, sila semak log"\nErrAppParamKey: "Pengecualian medan parameter {{ .name }}"\nErrAppUpgrade: "Gagal menaik taraf aplikasi {{ .name }} {{ .err }}"\nAppRecover: "Aplikasi {{ .name }} telah dikembalikan"\nPullImageStart: "Memulakan penarikan imej {{ .name }}"\nPullImageSuccess: "Imej berjaya ditarik"\nUpgradeAppStart: "Memulakan peningkatan aplikasi {{ .name }}"\nUpgradeAppSuccess: "Aplikasi {{ .name }} berjaya dinaik taraf"\n\n#file\nErrFileCanNotRead: "Fail tidak boleh dibaca"\nErrFileToLarge: "Fail terlalu besar"\nErrPathNotFound: "Laluan tidak dijumpai"\nErrMovePathFailed: "Laluan sasaran tidak boleh mengandungi laluan asal!"\nErrLinkPathNotFound: "Laluan sasaran tidak wujud!"\nErrFileIsExist: "Fail atau direktori sudah wujud!"\nErrFileUpload: "Gagal memuat naik fail {{.name}} {{.detail}}"\nErrFileDownloadDir: "Muat turun folder tidak disokong"\nErrCmdNotFound: "Arahan {{ .name }} tidak wujud, sila pasang arahan ini pada hos terlebih dahulu"\nErrSourcePathNotFound: "Direktori sumber tidak wujud"\nErrFavoriteExist: "Laluan ini telah dikumpulkan"\nErrInvalidChar: "Aksara tidak sah adalah dilarang"\nErrPathNotDelete: "Direktori yang dipilih tidak boleh dipadam"\n\n#website\nErrDomainIsExist: "Nama domain sudah wujud"\nErrAliasIsExist: "Alias sudah wujud"\nErrAppDelete: "Laman web lain menggunakan aplikasi ini"\nErrGroupIsUsed: "Kumpulan sedang digunakan dan tidak boleh dipadam"\nErrBackupMatch: "Fail sandaran tidak sepadan dengan data separa laman web semasa: {{ .detail }}"\nErrBackupExist: "Fail sandaran berkaitan dengan sebahagian data asal yang tidak wujud: {{ .detail }}"\nErrPHPResource: "Runtime tempatan tidak menyokong penukaran!"\nErrPathPermission: "Folder dengan kebenaran bukan 1000:1000 dikesan dalam direktori indeks, yang mungkin menyebabkan ralat 'Access denied' semasa mengakses laman web. 
Sila klik butang simpan di atas"\nErrDomainIsUsed: "Domain sudah digunakan oleh laman web {{ .name }}"\nErrDomainFormat: "Format domain {{ .name }} adalah salah"\nErrDefaultAlias: "default ialah nama kod terpelihara, sila gunakan nama kod lain"\nErrImageNotExist: "Imej persekitaran runtime {{.name}} tidak wujud, sila sunting semula persekitaran runtime"\n\n#ssl\nErrSSLCannotDelete: "Sijil {{ .name }} sedang digunakan oleh laman web dan tidak boleh dihapuskan"\nErrAccountCannotDelete: "Sijil yang dikaitkan dengan akaun tidak boleh dihapuskan"\nErrSSLApply: "Sijil berjaya terus ditandatangani, tetapi reload OpenResty gagal, sila semak konfigurasi!"\nErrEmailIsExist: "Emel sudah wujud"\nErrSSLKeyNotFound: "Fail kunci peribadi tidak wujud"\nErrSSLCertificateNotFound: "Fail sijil tidak wujud"\nErrSSLKeyFormat: "Ralat pengesahan fail kunci peribadi"\nErrSSLCertificateFormat: "Format fail sijil salah, sila gunakan format PEM"\nErrEabKidOrEabHmacKeyCannotBlank: "EabKid atau EabHmacKey tidak boleh kosong"\nErrOpenrestyNotFound: "Mod HTTP memerlukan OpenResty dipasang terlebih dahulu"\nApplySSLStart: "Memulakan permohonan sijil, nama domain [{{ .domain }}], kaedah permohonan [{{ .type }}]"\ndnsAccount: "DNS Automatik"\ndnsManual: "DNS Manual"\nhttp: "HTTP"\nApplySSLFailed: "Permohonan sijil untuk [{{ .domain }}] gagal, {{.detail}}"\nApplySSLSuccess: "Permohonan sijil untuk [{{ .domain }}] berjaya!"\nDNSAccountName: "Akaun DNS [{{ .name }}], pengeluar [{{ .type }}]"\nPushDirLog: "Sijil dihantar ke direktori [{{ .path }}] {{ .status }}"\nErrDeleteCAWithSSL: "Terdapat sijil yang telah dikeluarkan di bawah organisasi semasa dan tidak boleh dihapuskan"\nErrDeleteWithPanelSSL: "Konfigurasi SSL panel menggunakan sijil ini dan tidak boleh dihapuskan"\nErrDefaultCA: "Pihak Berkuasa Sijil lalai tidak boleh dihapuskan"\nApplyWebSiteSSLLog: "Memulakan kemas kini sijil laman web {{ .name }}"\nErrUpdateWebsiteSSL: "Sijil laman web {{ .name }} gagal dikemas kini: {{ .err 
}}"\nApplyWebSiteSSLSuccess: "Sijil laman web berjaya dikemas kini"\nErrExecShell: "Pelaksanaan skrip gagal {{ .err }}"\nExecShellStart: "Memulakan pelaksanaan skrip"\nExecShellSuccess: "Skrip berjaya dilaksanakan"\nStartUpdateSystemSSL: "Memulakan kemas kini sijil sistem"\nUpdateSystemSSLSuccess: "Sijil sistem berjaya dikemas kini"\n\n#mysql\nErrUserIsExist: "Pengguna semasa sudah wujud. Sila masukkan pengguna baharu"\nErrDatabaseIsExist: "Pangkalan data semasa sudah wujud. Sila masukkan pangkalan data baharu"\nErrExecTimeOut: "Pelaksanaan SQL tamat masa, sila semak pangkalan data"\nErrRemoteExist: "Pangkalan data jauh dengan nama tersebut sudah wujud, sila ubah dan cuba lagi"\nErrLocalExist: "Pangkalan data tempatan dengan nama tersebut sudah wujud, sila ubah dan cuba lagi"\n\n#redis\nErrTypeOfRedis: "Jenis fail pemulihan tidak sepadan dengan mod ketekalan semasa. Sila ubah jenis fail dan cuba lagi."\n\n#container\nErrInUsed: "{{ .detail }} sedang digunakan dan tidak boleh dihapuskan"\nErrObjectInUsed: "Objek ini sedang digunakan dan tidak boleh dihapuskan"\nErrObjectBeDependent: "Imej ini bergantung kepada imej lain dan tidak boleh dihapuskan"\nErrPortRules: "Bilangan port tidak sepadan, sila masukkan semula!"\nErrPgImagePull: "Tarik imej tamat masa. Sila konfigurasikan pecutan imej atau tarik imej postgres:16.0-alpine secara manual dan cuba lagi"\n\n#runtime\nErrDirNotFound: "Folder binaan tidak wujud! Sila semak integriti fail!"\nErrFileNotExist: "Fail {{ .detail }} tidak wujud! 
Sila semak integriti fail sumber!"\nErrImageBuildErr: "Binaan imej gagal"\nErrImageExist: "Imej sudah wujud!"\nErrDelWithWebsite: "Persekitaran operasi telah dikaitkan dengan laman web dan tidak boleh dihapuskan"\nErrRuntimeStart: "Gagal memulakan"\nErrPackageJsonNotFound: "Fail package.json tidak wujud"\nErrScriptsNotFound: "Tiada item konfigurasi skrip dijumpai dalam package.json"\nErrContainerNameNotFound: "Tidak dapat mendapatkan nama kontena, sila semak fail .env"\nErrNodeModulesNotFound: "Folder node_modules tidak wujud! Sila sunting persekitaran operasi atau tunggu sehingga persekitaran operasi berjaya dimulakan"\n\n#setting\nErrBackupInUsed: "Akaun sandaran sedang digunakan dalam cronjob dan tidak boleh dihapuskan."\nErrBackupCheck: "Ujian sambungan akaun sandaran gagal {{ .err }}"\nErrOSSConn: "Tidak dapat mendapatkan versi terkini, sila semak sama ada pelayan boleh menyambung ke rangkaian luaran."\nErrEntrance: "Ralat maklumat pintu masuk keselamatan. Sila semak dan cuba lagi!"\n\n#tool\nErrConfigNotFound: "Fail konfigurasi tidak wujud"\nErrConfigParse: "Format fail konfigurasi salah"\nErrConfigIsNull: "Fail konfigurasi tidak boleh kosong"\nErrConfigDirNotFound: "Direktori operasi tidak wujud"\nErrConfigAlreadyExist: "Fail konfigurasi dengan nama yang sama sudah wujud"\nErrUserFindErr: "Gagal mencari pengguna {{ .name }} {{ .err }}"\n\n#ssh\nErrFirewallNone: "Tiada perkhidmatan firewalld atau ufw dikesan pada sistem. Sila semak dan cuba lagi!"\nErrFirewallBoth: "Kedua-dua perkhidmatan firewalld dan ufw dikesan pada sistem. Untuk mengelakkan konflik, sila nyahpasang salah satu dan cuba lagi!"\n\n#cronjob\nErrBashExecute: "Ralat pelaksanaan skrip, sila semak maklumat khusus dalam kawasan teks output tugas."\nErrCutWebsiteLog: "Pemotongan log laman web {{ .name }} gagal, ralat {{ .err }}"\nCutWebsiteLogSuccess: "Log laman web {{ .name }} berjaya dipotong, laluan sandaran {{ .path }}"\n\n#toolbox\nErrNotExistUser: "Pengguna semasa tidak wujud. 
Sila ubah dan cuba lagi!"\nErrBanAction: "Tetapan gagal, perkhidmatan {{ .name }} semasa tidak tersedia, sila semak dan cuba lagi!"\nErrClamdscanNotFound: "Perintah clamdscan tidak dikesan, sila rujuk dokumentasi untuk memasangnya!"\n\n#waf\nErrScope: "Pengubahsuaian konfigurasi ini tidak disokong"\nErrStateChange: "Pengubahsuaian status gagal"\nErrRuleExist: "Peraturan sudah wujud"\nErrRuleNotExist: "Peraturan tidak wujud"\nErrParseIP: "Format IP salah"\nErrDefaultIP: "default adalah nama terpelihara, sila tukar kepada nama lain"\nErrGroupInUse: "Kumpulan IP sedang digunakan oleh senarai hitam/putih dan tidak boleh dihapuskan"\nErrGroupExist: "Nama kumpulan IP sudah wujud"\nErrIPRange: "Julat IP salah"\nErrIPExist: "IP sudah wujud"\n\n#license\nErrLicense: "Format lesen salah, sila semak dan cuba lagi!"\nErrLicenseCheck: "Pengesahan lesen gagal, sila semak dan cuba lagi!"\nErrLicenseSave: "Gagal menyimpan maklumat lesen, ralat {{ .err }}, sila cuba lagi!"\nErrLicenseSync: "Gagal menyelaraskan maklumat lesen, tiada maklumat lesen dikesan dalam pangkalan data!"\nErrXpackNotFound: "Bahagian ini adalah ciri edisi profesional, sila import lesen terlebih dahulu dalam Tetapan Panel - Antara Muka Lesen"\nErrXpackNotActive: "Bahagian ini adalah ciri edisi profesional, sila selaraskan status lesen terlebih dahulu dalam Tetapan Panel - Antara Muka Lesen"\nErrXpackOutOfDate: "Lesen semasa telah tamat tempoh, sila import semula lesen dalam Tetapan Panel - Antara Muka Lesen"\nErrXpackLost: "Lesen telah mencapai bilangan maksimum percubaan semula. 
Sila pergi ke halaman [Tetapan] [Lesen] dan klik butang selaraskan secara manual untuk memastikan ciri versi profesional berfungsi dengan baik."\nErrXpackTimeout: "Permintaan tamat masa, sambungan rangkaian mungkin tidak stabil, sila cuba lagi nanti!"\n\n#license\nErrAlert: "Format maklumat amaran salah, sila semak dan cuba lagi!"\nErrAlertPush: "Ralat penghantaran amaran, sila semak dan cuba lagi!"\nErrAlertSave: "Ralat penyimpanan amaran, sila semak dan cuba lagi!"\nErrAlertSync: "Ralat penyelarasan amaran, sila semak dan cuba lagi!"\nErrAlertRemote: "Ralat amaran jauh, sila semak dan cuba lagi!"\n\n#cmd\nAppVersion: "Versi aplikasi"\nAppCommands: "Arahan berkaitan aplikasi"\nAppInit: "Inisialisasi aplikasi"\nAppKeyVal: "Kunci aplikasi (hanya menyokong bahasa Inggeris)"\nAppCreateFileErr: "Gagal mencipta fail {{ .name }} {{ .err }}"\nAppCreateDirErr: "Gagal mencipta folder {{ .name }} {{ .err }}"\nAppMissKey: "Kunci aplikasi hilang, gunakan -k untuk menetapkan"\nAppMissVersion: "Versi aplikasi hilang, gunakan -v untuk menetapkan"\nAppVersionExist: "Versi sudah wujud!"\nAppCreateSuccessful: "Ciptaan berjaya!"\nAppWriteErr: "Penulisan fail {{ .name }} gagal {{ .err }}"\nSudoHelper: "Sila gunakan {{ .cmd }} atau tukar ke pengguna root"\nListenIPCommands: "Tukar IP mendengar"\nListenIPv4: "Mendengar pada IPv4"\nListenIPv6: "Mendengar pada IPv6"\nListenChangeSuccessful: "Tukar berjaya! 
Kini mendengar pada {{ .value }}"\nResetCommands: "Tetapkan semula maklumat sistem"\nResetMFA: "Batal pengesahan dua faktor 1Panel"\nResetHttps: "Batal log masuk HTTPS 1Panel"\nResetEntrance: "Batal pintu masuk keselamatan 1Panel"\nResetIPs: "Batal sekatan IP yang dibenarkan 1Panel"\nResetDomain: "Batal pengikatan domain 1Panel"\nRestoreCommands: "Pulihkan perkhidmatan dan data 1Panel"\nRestoreNoSuchFile: "Tiada fail tersedia untuk pemulihan"\nRestoreStep1: "(1/5) Memulakan pemulihan perkhidmatan dan data 1Panel daripada direktori {{ .name }}..."\nRestoreStep2: "(2/5) Pemulihan binari 1Panel berjaya"\nRestoreStep3: "(3/5) Pemulihan skrip 1Panel berjaya"\nRestoreStep4: "(4/5) Pemulihan perkhidmatan 1Panel berjaya"\nRestoreStep5: "(5/5) Pemulihan data 1Panel berjaya"\nRestoreSuccessful: "Pemulihan berjaya! Memulakan semula perkhidmatan, sila tunggu..."\nUpdateCommands: "Kemas kini maklumat panel"\nUpdateUser: "Kemas kini pengguna panel"\nUpdatePassword: "Kemas kini kata laluan panel"\nUpdatePort: "Kemas kini port panel"\nUpdateUserNull: "Ralat: Pengguna panel kosong!"\nUpdateUserBlank: "Ralat: Pengguna panel mengandungi ruang kosong!"\nUpdateUserFormat: "Ralat: Format pengguna panel tidak sah! 
Hanya menyokong huruf Inggeris, Cina, nombor, dan , dengan panjang 3-30 aksara"\nUpdateUserErr: "Ralat: Gagal mengemas kini pengguna panel, {{ .err }}"\nUpdateSuccessful: "Kemas kini berjaya!"\nUpdateUserResult: "Pengguna panel: {{ .name }}"\nUpdatePasswordRead: "Ralat: Gagal membaca maklumat kata laluan panel, {{ .err }}"\nUpdatePasswordNull: "Ralat: Kata laluan panel kosong!"\nUpdateUPasswordBlank: "Ralat: Kata laluan panel mengandungi ruang kosong!"\nUpdatePasswordFormat: "Ralat: Kata laluan panel hanya menyokong huruf, nombor, dan aksara khas !@#$%*,.?, dengan panjang 8-30 aksara!"\nUpdatePasswordLen: "Ralat: Sila masukkan kata laluan lebih panjang daripada 6 aksara!"\nUpdatePasswordRe: "Sahkan kata laluan:"\nUpdatePasswordErr: "Ralat: Gagal mengemas kini kata laluan panel, {{ .err }}"\nUpdatePasswordSame: "Ralat: Kedua-dua kata laluan tidak sepadan, sila semak dan cuba lagi!"\nUpdatePasswordResult: "Kata laluan panel: {{ .name }}"\nUpdatePortFormat: "Ralat: Nombor port yang dimasukkan mesti antara 1 hingga 65535!"\nUpdatePortUsed: "Ralat: Nombor port sudah digunakan, sila semak dan cuba lagi!"\nUpdatePortErr: "Ralat: Gagal mengemas kini port panel, {{ .err }}"\nUpdatePortResult: "Port Panel: {{ .name }}"\nUpdatePortFirewallAdd: "Gagal menambah peraturan port firewall, {{ .err }}, sila tambah port {{ .name }} secara manual ke dalam peraturan firewall."\nUpdatePortFirewallDel: "Ralat: Gagal memadam port firewall, {{ .err }}"\nUpdatePortFirewallReload: "Gagal memuat semula firewall, {{ .err }}, sila muat semula firewall secara manual."\nUserInfo: "Dapatkan maklumat panel"\nUserInfoAddr: "Alamat panel: "\nUserInfoPassHelp: "Petua: Untuk menukar kata laluan, anda boleh menjalankan arahan: "\nDBConnErr: "Ralat: Gagal memulakan sambungan pangkalan data, {{ .err }}"\nSystemVersion: "Versi: "\nSystemMode: "Mod: "\n\n#ai-tool\nErrOpenrestyInstall: 'Sila pasang Openresty terlebih dahulu'\nErrSSL: "Kandungan sijil kosong, sila periksa sijil!"\nErrSsePath: "Laluan SSE 
bertindan"\n\n#mobile app\nErrVerifyToken: 'Ralat pengesahan token, sila tetapkan semula dan imbas semula.'\nErrInvalidToken: 'Token tidak sah, sila tetapkan semula dan imbas semula.'\nErrExpiredToken: 'Token telah tamat tempoh, sila tetapkan semula dan imbas semula.'\n | dataset_sample\yaml\1Panel-dev_1Panel\backend\i18n\lang\ms.yml | ms.yml | YAML | 17,350 | 0.95 | 0 | 0.072727 | python-kit | 121 | 2025-04-04T07:14:14.965835 | Apache-2.0 | false | 8838d94fb4afc1f95d997ee0b6d44fe9 |
additionalProperties:\n key: # The application's key, limited to English, used to create a folder in Linux\n name: # Application name\n tags:\n - Tool # Application tags, multiple tags are allowed. Refer to the tag list below\n shortDescZh: # Application description in Chinese, no more than 30 characters\n shortDescEn: # Application description in English\n type: tool # Application type, different from application category, only one is allowed. Refer to the type list below\n crossVersionUpdate: # Whether cross-major-version upgrades are supported\n limit: # Application installation limit, 0 means no limit\n website: # Official website address\n github: # GitHub address\n document: # Documentation address\n | dataset_sample\yaml\1Panel-dev_1Panel\cmd\server\app\app_config.yml | app_config.yml | YAML | 730 | 0.8 | 0 | 0 | node-utils | 234 | 2025-05-24T14:11:00.516021 | BSD-3-Clause | false | d17162551ffa52bb915909237951b65a |
additionalProperties:\n formFields:\n - default: 8080\n edit: true\n envKey: PANEL_APP_PORT_HTTP\n labelEn: Port\n labelZh: 端口\n required: true\n rule: paramPort\n type: number\n | dataset_sample\yaml\1Panel-dev_1Panel\cmd\server\app\app_param.yml | app_param.yml | YAML | 211 | 0.85 | 0 | 0 | python-kit | 451 | 2024-01-13T02:27:28.795747 | BSD-3-Clause | false | c3a6f72b48c6c6ebd03cf68dc68c043d |
services:\n mcp-server:\n image: supercorp/supergateway:latest\n container_name: ${CONTAINER_NAME}\n restart: unless-stopped\n ports:\n - "${HOST_IP}:${PANEL_APP_PORT_HTTP}:${PANEL_APP_PORT_HTTP}"\n command: [\n "--stdio", "${COMMAND}",\n "--port", "${PANEL_APP_PORT_HTTP}",\n "--baseUrl", "${BASE_URL}",\n "--ssePath", "${SSE_PATH}",\n "--messagePath", "${SSE_PATH}/messages"\n ]\n networks:\n - 1panel-network\nnetworks:\n 1panel-network:\n external: true | dataset_sample\yaml\1Panel-dev_1Panel\cmd\server\mcp\compose.yml | compose.yml | YAML | 500 | 0.7 | 0 | 0 | vue-tools | 230 | 2024-06-24T00:39:24.685959 | MIT | false | 407fbbaf7e60258b994b5345d6950c17 |
codecov:\n branch: dev\n require_ci_to_pass: yes\n allow_coverage_offsets: true\n status:\n project:\n default:\n threshold: 1%\n | dataset_sample\yaml\abpframework_abp\codecov.yml | codecov.yml | YAML | 140 | 0.85 | 0 | 0 | react-lib | 794 | 2025-05-27T09:46:22.293800 | BSD-3-Clause | false | a875a12d50c98daa2716baed5ec61ddc |
ui-angular:\n - npm/ng-packs/*\n - npm/ng-packs/**/*\n - npm/ng-packs/**/**/*\n - npm/ng-packs/**/**/**/*\n - npm/ng-packs/**/**/**/**/*\n - npm/ng-packs/**/**/**/**/**/*\n - templates/app/angular/*\n - templates/app/angular/**/*\n - templates/app/angular/**/**/*\n - templates/app/angular/**/**/**/*\n - templates/module/angular/*\n - templates/module/angular/**/*\n - templates/module/angular/**/**/*\n - templates/module/angular/**/**/**/*\n | dataset_sample\yaml\abpframework_abp\.github\labeler.yml | labeler.yml | YAML | 443 | 0.8 | 0 | 0 | react-lib | 334 | 2024-12-25T09:07:44.138646 | MIT | false | cb64f16a92be41346471453707f419dd |
# Configuration for Lock Threads - https://github.com/dessant/lock-threads-app\n\n# Number of days of inactivity before a closed issue or pull request is locked\ndaysUntilLock: 30\n\n# Skip issues and pull requests created before a given timestamp. Timestamp must\n# follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable\nskipCreatedBefore: false\n\n# Issues and pull requests with these labels will be ignored. Set to `[]` to disable\nexemptLabels: []\n\n# Label to add before locking, such as `outdated`. Set to `false` to disable\nlockLabel: false\n\n# Comment to post before locking. Set to `false` to disable\nlockComment: >\n This thread has been automatically locked since there has not been\n any recent activity after it was closed. Please open a new issue for\n related bugs.\n\n# Assign `resolved` as the reason for locking. Set to `false` to disable\nsetLockReason: true\n\n# Limit to only `issues` or `pulls`\n# only: issues\n\n# Optionally, specify configuration settings just for `issues` or `pulls`\n# issues:\n# exemptLabels:\n# - help-wanted\n# lockLabel: outdated\n\n# pulls:\n# daysUntilLock: 30\n\n# Repository to extend settings from\n# _extends: repo | dataset_sample\yaml\abpframework_abp\.github\lock.yml | lock.yml | YAML | 1,153 | 0.8 | 0.108108 | 0.678571 | react-lib | 40 | 2024-11-20T08:58:30.653119 | BSD-3-Clause | false | a9bec34f3213c79adb1a4ca1118dc566 |
# Number of days of inactivity before an issue becomes stale\ndaysUntilStale: 60\n# Number of days of inactivity before a stale issue is closed\ndaysUntilClose: 7\n# Set to true to ignore issues in a milestone (defaults to false)\nexemptMilestones: true\n# Label to use when marking an issue as stale\nstaleLabel: inactive\n# Comment to post when marking an issue as stale. Set to `false` to disable\nmarkComment: >\n This issue has been automatically marked as stale because it has not had\n recent activity. It will be closed if no further activity occurs. Thank you\n for your contributions.\n# Comment to post when closing a stale issue. Set to `false` to disable\ncloseComment: false | dataset_sample\yaml\abpframework_abp\.github\stale.yml | stale.yml | YAML | 677 | 0.8 | 0.142857 | 0.4 | node-utils | 587 | 2023-12-26T00:56:14.934518 | BSD-3-Clause | false | 23c88501cbb144c4e5e6bad1ea91a5da |
name: 🐞 Bug Report\ndescription: Create a report to help us improve\nlabels: [bug]\nbody:\n - type: markdown\n attributes:\n value: |\n We welcome bug reports! This template will help us gather the information we need to start the triage process.\n \n Please keep in mind that the GitHub issue tracker is not intended as a general support forum, but for reporting **non-security** bugs and feature requests.\n If you believe you have an issue that affects the SECURITY of the platform, please do NOT create an issue and instead email your issue details to info@abp.io.\n For other types of questions, consider using [StackOverflow](https://stackoverflow.com/questions/tagged/abp).\n - type: checkboxes\n id: searched\n attributes:\n label: Is there an existing issue for this?\n description: Please search to see if an issue already exists for the bug you encountered ([abp/issues](https://github.com/abpframework/abp/issues)).\n options:\n - label: I have searched the existing issues\n required: true\n - type: textarea\n id: background\n attributes:\n label: Description\n description: Please share a clear and concise description of the problem.\n placeholder: Description\n validations:\n required: true\n - type: textarea\n id: repro-steps\n attributes:\n label: Reproduction Steps\n description: |\n Please include minimal steps to reproduce the problem if possible. E.g.: the smallest possible code snippet; or a small project, with steps to run it. If possible include text as text rather than screenshots (so it shows up in searches).\n placeholder: Minimal Reproduction\n validations:\n required: false\n - type: textarea\n id: expected-behavior\n attributes:\n label: Expected behavior\n description: |\n Provide a description of the expected behavior.\n placeholder: Expected behavior\n validations:\n required: false\n - type: textarea\n id: actual-behavior\n attributes:\n label: Actual behavior\n description: |\n Provide a description of the actual behavior observed. 
If applicable please include any error messages, exception stacktraces or memory dumps.\n placeholder: Actual behavior\n validations:\n required: false\n - type: textarea\n id: regression\n attributes:\n label: Regression?\n description: |\n Did this work in a previous build or release of ABP framework? If you can try a previous release or build to find out, that can help us narrow down the problem. If you don't know, that's OK.\n placeholder: Regression?\n validations:\n required: false\n - type: textarea\n id: known-workarounds\n attributes:\n label: Known Workarounds\n description: |\n Please provide a description of any known workarounds.\n placeholder: Known Workarounds\n validations:\n required: false\n - type: markdown\n attributes:\n value: |\n ## Configuration\n Please provide more information on your ABP configuration.\n - type: input\n id: version\n attributes:\n label: Version\n description: Which version of ABP is the code running on?\n placeholder: Version\n validations:\n required: true\n - type: dropdown\n id: user-interface\n attributes:\n label: User Interface\n description: Which user interface of ABP is related to the problem?\n options:\n - Common (Default)\n - MVC\n - Angular\n - Blazor\n - Blazor Server\n - React Native\n - MAUI\n validations:\n required: true\n - type: dropdown\n id: database-provider\n attributes:\n label: Database Provider\n description: Which database provider of ABP is used?\n options:\n - EF Core (Default)\n - MongoDB\n - None/Others\n validations:\n required: true\n - type: dropdown\n id: structure\n attributes:\n label: Tiered or separate authentication server\n description: Which structure of ABP is specified?\n options:\n - None (Default)\n - Tiered\n - Separate Auth Server\n validations:\n required: true\n - type: dropdown\n id: Operation-System\n attributes:\n label: Operation System\n description: What is the operation system of the server?\n options:\n - Windows (Default)\n - Linux\n - macOS\n - Others\n 
validations:\n required: true\n - type: markdown\n attributes:\n value: |\n ---\n - type: textarea\n id: other-info\n attributes:\n label: Other information\n description: |\n If you have an idea where the problem might lie, let us know that here. Please include any pointers to code, relevant changes, or related issues you know of.\n placeholder: Other information\n validations:\n required: false\n | dataset_sample\yaml\abpframework_abp\.github\ISSUE_TEMPLATE\01_bug_report.yml | 01_bug_report.yml | YAML | 4,871 | 0.95 | 0.040541 | 0.006803 | python-kit | 601 | 2025-05-13T06:14:15.217351 | GPL-3.0 | false | ab1c54ea3e596340b55290f10e352cf7 |
name: 💡 Feature request\ndescription: Suggest an idea for this project\nlabels: [feature-request]\nbody:\n- type: checkboxes\n attributes:\n label: Is there an existing issue for this?\n description: Please search to see if an issue already exists for the feature you are requesting. (https://github.com/abpframework/abp/issues).\n options:\n - label: I have searched the existing issues\n required: true\n- type: textarea\n attributes:\n label: Is your feature request related to a problem? Please describe the problem.\n description: A clear and concise description of what the problem is.\n placeholder: I am trying to do [...] but [...]\n validations:\n required: false\n- type: textarea\n attributes:\n label: Describe the solution you'd like\n description: |\n A clear and concise description of what you want to happen. Include any alternative solutions you've considered.\n placeholder: I would like to see [...]\n validations:\n required: true\n- type: textarea\n attributes:\n label: Additional context\n description: |\n Add any other context or screenshots about the feature request here.\n placeholder: Add any other context or screenshots about the feature request here.\n validations:\n required: false\n | dataset_sample\yaml\abpframework_abp\.github\ISSUE_TEMPLATE\02_feature_request.yml | 02_feature_request.yml | YAML | 1,258 | 0.95 | 0.117647 | 0 | vue-tools | 227 | 2024-06-21T00:01:11.475488 | MIT | false | fa4b2f1ad161b003ee0577c9c0c0d682 |
name: 🤠 ABP Studio\ndescription: Create a report to help us improve the ABP Studio\nlabels: [studio]\nbody:\n - type: markdown\n attributes:\n value: |\n We welcome bug reports! This template will help us gather the information we need to start the triage process.\n \n Please keep in mind that the GitHub issue tracker is not intended as a general support forum, but for reporting **non-security** bugs and feature requests.\n If you believe you have an issue that affects the SECURITY of the platform, please do NOT create an issue and instead email your issue details to info@abp.io.\n For other types of questions, consider using [StackOverflow](https://stackoverflow.com/questions/tagged/abp).\n - type: checkboxes\n id: searched\n attributes:\n label: Is there an existing issue for this?\n description: Please search to see if an issue already exists for the bug you encountered or feature request ([abp/issues](https://github.com/abpframework/abp/issues?q=is%3Aopen+is%3Aissue+label%3Astudio)).\n options:\n - label: I have searched the existing issues\n required: true\n - type: textarea\n id: background\n attributes:\n label: Description\n description: Please share a clear and concise description of the problem.\n placeholder: Description\n validations:\n required: true\n - type: markdown\n attributes:\n value: |\n ## Setup\n Please provide more information on your ABP Studio setup.\n - type: input\n id: version\n attributes:\n label: Version\n description: Which version of ABP Studio are you using?\n placeholder: Version\n validations:\n required: true\n - type: dropdown\n id: Operation-System\n attributes:\n label: Operation System\n description: What is the operation system of the computer?\n options:\n - Windows (Default)\n - Linux\n - macOS\n - Others\n validations:\n required: true\n - type: textarea\n id: solution-config\n attributes:\n label: Solution Configuration\n description: |\n If there is an open solution, what are the configurations of the solution? 
\n 🧐 Hint: You can see all the information about your solution from the configuration window, which opens when you right-click on the [solution](https://abp.io/docs/latest/studio/solution-explorer#solution) and click on the `Solution Configuration` button.\n placeholder: |\n - **Template**: app\n - **Created ABP Studio Version**: 0.7.9\n - **Tiered**: No\n - **UI Framework**: mvc\n - **Theme**: leptonx\n - **Theme Style**: system\n - **Database Provider**: ef\n - **Database Management System**: sqlserver\n - **Separate Tenant Schema**: No\n - **Mobile Framework**: none\n - **Public Website**: No\n - **Optional Modules**:\n * GDPR\n * TextTemplateManagement\n * LanguageManagement\n * AuditLogging\n * SaaS\n * OpenIddictAdmin\n validations:\n required: false \n - type: markdown\n attributes:\n value: |\n ---\n - type: textarea\n id: other-info\n attributes:\n label: Other information\n description: |\n If you have an idea where the problem might lie, let us know that here. Please include any pointers to code, relevant changes, or related issues you know of.\n placeholder: Other information\n validations:\n required: false\n | dataset_sample\yaml\abpframework_abp\.github\ISSUE_TEMPLATE\03_studio.yml | 03_studio.yml | YAML | 3,503 | 0.95 | 0.042553 | 0.075269 | vue-tools | 410 | 2024-11-14T22:44:26.851350 | BSD-3-Clause | false | 2cf70c47dd7a67ce9dbe79b8de94c139 |
name: 💎 Article request\ndescription: Article suggestion you want to be published on community.abp.io\nlabels: [community-article-request]\nbody:\n - type: checkboxes\n id: searched\n attributes:\n label: Is there an existing article or article request for this?\n description: Please search to see if there is an article or article request related to your article request ([abp.io/community](https://abp.io/community/articles), [abp/issues](https://github.com/abpframework/abp/issues?q=is%3Aopen+is%3Aissue+label%3Acommunity-article-request))\n options:\n - label: I have searched the existing resources\n required: true\n - type: textarea\n attributes:\n label: Describe the article you'd like\n description: |\n Please describe the article you'd like to be published on community.abp.io.\n If you have any reference article, please share it here.\n validations:\n required: true\n | dataset_sample\yaml\abpframework_abp\.github\ISSUE_TEMPLATE\04_article_request.yml | 04_article_request.yml | YAML | 936 | 0.95 | 0.1 | 0 | vue-tools | 432 | 2025-03-17T11:13:16.478614 | BSD-3-Clause | false | bde2ad0eee1e5b7f83bb8564e2cb70e8 |
blank_issues_enabled: false\ncontact_links:\n - name: Issue with ABP Commercial\n url: https://abp.io/support/questions\n about: Please open ABP Commercial related issues at https://abp.io/support/questions.\n - name: Ask a question (community support)\n url: https://stackoverflow.com/questions/tagged/abp\n about: Ask a question that will be answered by the ABP community\n | dataset_sample\yaml\abpframework_abp\.github\ISSUE_TEMPLATE\config.yml | config.yml | YAML | 383 | 0.8 | 0 | 0 | vue-tools | 451 | 2025-06-09T00:39:24.023215 | GPL-3.0 | false | f44346f5ad06f15a89d407f3035c25c6 |
name: 'Angular'\non:\n pull_request:\n paths:\n - 'npm/ng-packs/**/*.ts'\n - 'npm/ng-packs/**/*.html'\n - 'npm/ng-packs/*.json'\n - '!npm/ng-packs/scripts/**'\n - '!npm/ng-packs/packages/schematics/**'\n branches:\n - 'rel-*'\n - 'dev'\n types:\n - opened\n - synchronize\n - reopened\n - ready_for_review\npermissions:\n contents: read\n\njobs:\n build-test-lint:\n if: ${{ !github.event.pull_request.draft }}\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v2\n with:\n fetch-depth: 0\n\n - uses: actions/cache@v4\n with:\n path: 'npm/ng-packs/node_modules'\n key: ${{ runner.os }}-${{ hashFiles('npm/ng-packs/yarn.lock') }}\n\n - uses: actions/cache@v4\n with:\n path: 'templates/app/angular/node_modules'\n key: ${{ runner.os }}-${{ hashFiles('templates/app/angular/yarn.lock') }}\n\n - name: Install packages\n run: yarn install\n working-directory: npm/ng-packs\n\n - name: Run lint\n run: yarn affected:lint --base=remotes/origin/${{ github.base_ref }}\n working-directory: npm/ng-packs\n\n - name: Run build\n run: yarn affected:build --base=remotes/origin/${{ github.base_ref }}\n working-directory: npm/ng-packs\n\n - name: Run test\n run: yarn affected:test --base=remotes/origin/${{ github.base_ref }}\n working-directory: npm/ng-packs\n | dataset_sample\yaml\abpframework_abp\.github\workflows\angular.yml | angular.yml | YAML | 1,441 | 0.8 | 0.018519 | 0 | node-utils | 746 | 2024-05-02T16:48:37.564752 | Apache-2.0 | false | 16da87412564f3e758ae33495906a05e |
name: Merge branch dev with rel-9.2\non:\n push:\n branches:\n - rel-9.2\npermissions:\n contents: read\n\njobs:\n merge-dev-with-rel-9-2:\n permissions:\n contents: write # for peter-evans/create-pull-request to create branch\n pull-requests: write # for peter-evans/create-pull-request to create a PR\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v2\n with:\n ref: dev\n - name: Reset promotion branch\n run: |\n git fetch origin rel-9.2:rel-9.2\n git reset --hard rel-9.2\n - name: Create Pull Request\n uses: peter-evans/create-pull-request@v3\n with:\n branch: auto-merge/rel-9-2/${{github.run_number}}\n title: Merge branch dev with rel-9.2\n body: This PR generated automatically to merge dev with rel-9.2. Please review the changed files before merging to prevent any errors that may occur.\n reviewers: maliming\n draft: true\n token: ${{ github.token }}\n - name: Merge Pull Request\n env:\n GH_TOKEN: ${{ secrets.BOT_SECRET }}\n run: |\n gh pr ready\n gh pr review auto-merge/rel-9-2/${{github.run_number}} --approve\n gh pr merge auto-merge/rel-9-2/${{github.run_number}} --merge --auto --delete-branch\n | dataset_sample\yaml\abpframework_abp\.github\workflows\auto-pr.yml | auto-pr.yml | YAML | 1,304 | 0.8 | 0.052632 | 0 | react-lib | 311 | 2023-10-31T19:39:04.460358 | GPL-3.0 | false | 2b84c2b440f20c62e4a4369701800927 |
name: cancel-workflow\non: [push]\npermissions:\n contents: read\n\njobs:\n cancel:\n permissions:\n actions: write # for styfle/cancel-workflow-action to cancel/stop running workflows\n name: 'Cancel Previous Runs'\n runs-on: ubuntu-latest\n timeout-minutes: 3\n steps:\n - uses: styfle/cancel-workflow-action@0.6.0\n with:\n workflow_id: 10629,1299107,2792859,8268314\n access_token: ${{ github.token }}\n | dataset_sample\yaml\abpframework_abp\.github\workflows\cancel-workflow.yml | cancel-workflow.yml | YAML | 443 | 0.8 | 0.058824 | 0 | react-lib | 568 | 2025-02-01T18:31:15.236983 | MIT | false | 34806255d2f189bbd745191261d8ee20 |
# For most projects, this workflow file will not need changing; you simply need\n# to commit it to your repository.\n#\n# You may wish to alter this file to override the set of languages analyzed,\n# or to provide custom queries or build logic.\nname: "CodeQL"\n\non:\n push:\n branches: [dev, rel-*]\n paths:\n - "abp/**/*.js"\n - "abp/**/*.cs"\n - "abp/**/*.cshtml"\n - "abp/**/*.csproj"\n - "abp/**/*.razor"\n pull_request:\n # The branches below must be a subset of the branches above\n branches: [dev]\n paths:\n - "abp/**/*.js"\n - "abp/**/*.cs"\n - "abp/**/*.cshtml"\n - "abp/**/*.csproj"\n - "abp/**/*.razor"\n types:\n - opened\n - synchronize\n - reopened\n - ready_for_review\n\npermissions:\n contents: read\n\njobs:\n analyze:\n if: ${{ !github.event.pull_request.draft }}\n permissions:\n actions: read # for github/codeql-action/init to get workflow details\n contents: read # for actions/checkout to fetch code\n security-events: write # for github/codeql-action/autobuild to send a status report\n name: Analyze\n runs-on: ubuntu-latest\n\n strategy:\n fail-fast: false\n matrix:\n # Override automatic language detection by changing the below list\n # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python']\n language: ["csharp", "javascript"]\n # Learn more...\n # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection\n\n steps:\n - name: Checkout repository\n uses: actions/checkout@v2\n with:\n # We must fetch at least the immediate parents so that if this is\n # a pull request then we can checkout the head.\n fetch-depth: 2\n\n # If this run was triggered by a pull request event, then checkout\n # the head of the pull request instead of the merge commit.\n - run: git checkout HEAD^2\n if: ${{ github.event_name == 'pull_request' }}\n\n # Initializes the CodeQL tools for scanning.\n - name: Initialize CodeQL\n uses: 
github/codeql-action/init@v1\n with:\n languages: ${{ matrix.language }}\n # If you wish to specify custom queries, you can do so here or in a config file.\n # By default, queries listed here will override any specified in a config file.\n # Prefix the list here with "+" to use these queries and those in the config file.\n # queries: ./path/to/local/query, your-org/your-repo/queries@main\n\n # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).\n # If this step fails, then you should remove it and run the build manually (see below)\n - name: Autobuild\n uses: github/codeql-action/autobuild@v1\n\n # ℹ️ Command-line programs to run using the OS shell.\n # 📚 https://git.io/JvXDl\n\n # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines\n # and modify them (or add more) to build your code if your project\n # uses a compiled language\n\n #- run: |\n # make bootstrap\n # make release\n\n - name: Perform CodeQL Analysis\n uses: github/codeql-action/analyze@v1\n | dataset_sample\yaml\abpframework_abp\.github\workflows\codeql-analysis.yml | codeql-analysis.yml | YAML | 3,304 | 0.8 | 0.085106 | 0.353659 | vue-tools | 954 | 2024-08-16T02:48:13.833400 | GPL-3.0 | false | 41c56030d154877148d0b43a5477938d |
name: Compress Images\non:\n pull_request:\n paths:\n - "**.jpg"\n - "**.jpeg"\n - "**.png"\n - "**.webp"\n - "**.gif"\n types:\n - opened\n - synchronize\n - reopened\n - ready_for_review\njobs:\n build:\n if: github.event.pull_request.head.repo.full_name == github.repository && !github.event.pull_request.draft\n name: calibreapp/image-actions\n runs-on: ubuntu-latest\n steps:\n - name: Checkout Repo\n uses: actions/checkout@v2\n\n - name: Compress Images\n uses: calibreapp/image-actions@main\n with:\n githubToken: ${{ secrets.GITHUB_TOKEN }}\n | dataset_sample\yaml\abpframework_abp\.github\workflows\image-compression.yml | image-compression.yml | YAML | 629 | 0.7 | 0.037037 | 0 | vue-tools | 818 | 2025-06-24T11:08:19.825774 | Apache-2.0 | false | 8d700b1ae0e034bef3ab237e2ad1f96e |
name: Pull request labeler\non:\n schedule:\n - cron: '0 12 */1 * *'\npermissions:\n contents: read\njobs:\n labeler:\n permissions:\n pull-requests: write\n runs-on: ubuntu-latest\n steps:\n - uses: paulfantom/periodic-labeler@master\n env:\n GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n GITHUB_REPOSITORY: ${{ github.repository }}\n | dataset_sample\yaml\abpframework_abp\.github\workflows\labeler.yml | labeler.yml | YAML | 367 | 0.7 | 0 | 0 | react-lib | 371 | 2025-02-06T16:44:00.062564 | BSD-3-Clause | false | 71c180f3249cbc5392d200caf2fc52c2 |
name: Create Release\n\non:\n workflow_dispatch:\n inputs:\n tag_name:\n description: 'Tag Name' \n required: true\n prerelease:\n description: 'Pre-release?' \n required: true\n branchName:\n description: 'Branch Name' \n required: true\n\njobs:\n build:\n runs-on: ubuntu-latest\n steps:\n - name: Checkout code\n uses: actions/checkout@v2\n with:\n ref: ${{ github.event.inputs.branchName }}\n\n - name: Create Release\n id: create_release\n uses: actions/create-release@v1\n env:\n GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}\n with:\n tag_name: ${{ github.event.inputs.tag_name }}\n release_name: ${{ github.event.inputs.tag_name }}\n draft: false\n prerelease: ${{ github.event.inputs.prerelease }}\n\n - name: Checkout code at tag\n uses: actions/checkout@v2\n with:\n ref: ${{ github.event.inputs.tag_name }}\n\n - name: Build Project\n run: |\n # add your build commands here, depending on your project's requirements.\n echo "Build project here"\n | dataset_sample\yaml\abpframework_abp\.github\workflows\publish-release.yml | publish-release.yml | YAML | 1,156 | 0.95 | 0 | 0.025641 | awesome-app | 366 | 2023-09-02T21:00:16.923967 | GPL-3.0 | false | 17d9b159435a61904a77baf60d3db2a5 |
name: Update Latest Versions\n\non:\n release:\n types:\n - published\n \npermissions:\n contents: write\n pull-requests: write\n\njobs:\n update-versions:\n runs-on: ubuntu-latest\n steps:\n - name: Checkout repository\n uses: actions/checkout@v2\n\n - name: Set up Python\n uses: actions/setup-python@v2\n with:\n python-version: 3.x\n\n - name: Install dependencies\n run: |\n python -m pip install --upgrade pip\n pip install PyGithub\n\n - name: Update latest-versions.json and create PR\n env:\n GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}\n run: |\n python .github/scripts/update_versions.py\n | dataset_sample\yaml\abpframework_abp\.github\workflows\update-versions.yml | update-versions.yml | YAML | 696 | 0.7 | 0 | 0 | python-kit | 80 | 2023-08-05T14:02:24.728640 | MIT | false | b2113cf402aa501f9bbeb7d6935e54cf |
trigger:\n tags:\n include:\n - "*.*.*"\n\nresources:\n repositories:\n - repository: devops\n type: github\n endpoint: github.com_skoc10\n name: volosoft/devops\n ref: master\n\nvariables:\n # Container registry service connection established during pipeline creation\n dockerRegistryServiceConnection: 'volosoft-reg1'\n workDir: '$(Build.SourcesDirectory)'\n bootstrapTaghelpersDir: '$(workDir)/abp/modules/basic-theme/test/Volo.Abp.AspNetCore.Mvc.UI.Bootstrap.Demo'\n # tag: $[replace(variables['Build.SourceBranch'], 'refs/tags/', '')]\n tag: $(Build.BuildNumber)\n DOCKER_BUILDKIT: 1\n\npool:\n name: aks-deployer-agent\n\n\nstages:\n- stage: Package\n displayName: Package\n jobs:\n - job: Build\n displayName: Package Helm Charts and Values\n pool:\n name: aks-deployer-agent\n steps:\n - checkout: self\n - checkout: devops\n\n # - script: |\n # cd $(bootstrapTaghelpersDir) && dotnet publish -c Release -o bin/Release/publish\n\n - task: Docker@2\n displayName: Build Image\n inputs:\n command: build\n repository: demo/bootstrap-taghelpers\n dockerfile: $(bootstrapTaghelpersDir)/Dockerfile.azure\n buildContext: $(workDir)\n containerRegistry: $(dockerRegistryServiceConnection)\n tags: |\n $(tag)\n\n - task: Docker@2\n displayName: Push Image\n inputs:\n command: push\n repository: demo/bootstrap-taghelpers\n containerRegistry: $(dockerRegistryServiceConnection)\n tags: |\n $(tag)\n\n - bash: |\n mkdir -p $(Build.SourcesDirectory)/devops/aks/versions\n\n\n cat <<EOF > $(Build.SourcesDirectory)/devops/aks/versions/bootstrap-taghelpers-version.yaml\n image:\n repository: volosoft.azurecr.io/demo/bootstrap-taghelpers\n tag: "$(tag)"\n EOF\n\n cat $(Build.SourcesDirectory)/devops/aks/versions/bootstrap-taghelpers-version.yaml >> $(Build.SourcesDirectory)/devops/aks/helm/values/app/demo/bootstrap-taghelpers.abp.io.yaml\n\n - task: PublishBuildArtifacts@1\n displayName: 'Publish Artifact: bootstrap-taghelpers'\n inputs:\n PathtoPublish: 
'$(Build.SourcesDirectory)/devops/aks/helm'\n ArtifactName: 'bootstrap-taghelpers'\n\n\n | dataset_sample\yaml\abpframework_abp\modules\basic-theme\test\Volo.Abp.AspNetCore.Mvc.UI.Bootstrap.Demo\azure-pipelines.yml | azure-pipelines.yml | YAML | 2,237 | 0.8 | 0 | 0.061538 | awesome-app | 222 | 2023-12-09T06:20:37.159624 | BSD-3-Clause | true | 322f354ab2051b8d5784e0e4bb19b868 |
--- \nversion: "3.4"\nservices:\n bootstrap-taghelpers: \n build: \n context: .\n dockerfile: Dockerfile\n image: "${REGISTRY:-}abpio-bootstrap-taghelpers:${TAG:-latest}"\n\n | dataset_sample\yaml\abpframework_abp\modules\basic-theme\test\Volo.Abp.AspNetCore.Mvc.UI.Bootstrap.Demo\docker-compose.yml | docker-compose.yml | YAML | 182 | 0.7 | 0 | 0 | react-lib | 182 | 2025-03-02T16:53:23.873955 | Apache-2.0 | true | 6fa60f4dc2b1f63e1027f06d5b6e346a |
version: '3.4'\n\nservices:\n migrations:\n build:\n context: ../../\n dockerfile: templates/service/database/Dockerfile\n depends_on:\n - sqlserver\n environment:\n - IdentityServer_DB=CmsKit_Identity\n - CmsKit_DB=CmsKit_ModuleDb\n - SA_PASSWORD=yourStrong(!)Password\n | dataset_sample\yaml\abpframework_abp\modules\cms-kit\docker-compose.migrations.yml | docker-compose.migrations.yml | YAML | 298 | 0.7 | 0 | 0 | vue-tools | 394 | 2024-11-10T21:20:08.030397 | BSD-3-Clause | false | 8cd6b6c9dc14043105737ff4a3d513a3 |
version: '3.4'\n\nservices:\n sqlserver:\n environment:\n - SA_PASSWORD=yourStrong(!)Password\n - ACCEPT_EULA=Y\n ports:\n - "51599:1433"\n\n identity-server:\n environment:\n - ASPNETCORE_URLS=http://0.0.0.0:80\n - ConnectionStrings__Default=Server=sqlserver;Database=CmsKit_Identity;Trusted_Connection=True;User=sa;Password=yourStrong(!)Password;Integrated Security=false\n - ConnectionStrings__SqlServerCache=Server=sqlserver;Database=CmsKit_Cache;Trusted_Connection=True;User=sa;Password=yourStrong(!)Password;Integrated Security=false\n ports:\n - "51600:80"\n\n cms-kit:\n environment:\n - ASPNETCORE_URLS=http://0.0.0.0:80\n - ConnectionStrings__Default=Server=sqlserver;Database=CmsKit_ModuleDb;Trusted_Connection=True;User=sa;Password=yourStrong(!)Password;Integrated Security=false\n - ConnectionStrings__AbpSettingManagement=Server=sqlserver;Database=CmsKit_Identity;Trusted_Connection=True;User=sa;Password=yourStrong(!)Password;Integrated Security=false\n - ConnectionStrings__AbpPermissionManagement=Server=sqlserver;Database=CmsKit_Identity;Trusted_Connection=True;User=sa;Password=yourStrong(!)Password;Integrated Security=false\n - ConnectionStrings__AbpAuditLogging=Server=sqlserver;Database=CmsKit_Identity;Trusted_Connection=True;User=sa;Password=yourStrong(!)Password;Integrated Security=false\n - ConnectionStrings__SqlServerCache=Server=sqlserver;Database=CmsKit_Cache;Trusted_Connection=True;User=sa;Password=yourStrong(!)Password;Integrated Security=false\n - AuthServer__Authority=http://identity-server\n ports:\n - "51601:80" | dataset_sample\yaml\abpframework_abp\modules\cms-kit\docker-compose.override.yml | docker-compose.override.yml | YAML | 1,621 | 0.8 | 0 | 0 | vue-tools | 427 | 2025-06-30T01:33:59.096916 | GPL-3.0 | false | 371026dca0981d439dbf7c4d0ccf4807 |
version: '3.4'\n\nservices:\n sqlserver:\n image: mcr.microsoft.com/mssql/server\n volumes:\n - dbdata:/var/opt/mssql\n \n identity-server:\n build:\n context: ../../\n dockerfile: templates/service/host/IdentityServerHost/Dockerfile\n depends_on:\n - sqlserver \n \n cms-kit:\n build:\n context: ../../\n dockerfile: templates/service/host/Volo.CmsKit.Host/Dockerfile\n depends_on:\n - sqlserver\n - identity-server\n\nvolumes:\n dbdata: | dataset_sample\yaml\abpframework_abp\modules\cms-kit\docker-compose.yml | docker-compose.yml | YAML | 483 | 0.7 | 0 | 0 | vue-tools | 389 | 2023-11-30T19:33:49.089260 | MIT | false | 4681e060184724157997ec71da9d1809 |
version: '3.4'\n\nservices:\n volo-docs-migrator:\n image: 'volosoft/volo-docs-migrator:${TAG:-latest}'\n build:\n context: ../../\n dockerfile: modules/docs/app/VoloDocs.Migrator/Dockerfile\n environment:\n - ConnectionString=Server=sqlserver;Database=VoloDocs;Trusted_Connection=True;User=sa;Password=yourStrong(!)Password;Integrated Security=false\n depends_on:\n - sqlserver\n | dataset_sample\yaml\abpframework_abp\modules\docs\docker-compose.migrate.yml | docker-compose.migrate.yml | YAML | 401 | 0.7 | 0 | 0 | react-lib | 960 | 2025-01-03T03:51:13.996354 | MIT | false | 0dcfd2426af2de831115c8c30cb2ec53 |
version: '3.4'\n\nservices:\n sqlserver:\n environment:\n - SA_PASSWORD=yourStrong(!)Password\n - ACCEPT_EULA=Y\n ports:\n - "1433:1433"\n\n volo-docs:\n environment:\n - ConnectionString=Server=sqlserver;Database=VoloDocs;Trusted_Connection=True;User=sa;Password=yourStrong(!)Password;Integrated Security=false\n - Title=VoloDocs\n - LogoUrl=/assets/images/Logo.png\n ports:\n - "80:80"\n | dataset_sample\yaml\abpframework_abp\modules\docs\docker-compose.override.yml | docker-compose.override.yml | YAML | 421 | 0.7 | 0 | 0 | python-kit | 821 | 2024-07-24T00:27:12.879012 | MIT | false | 6be685447eb865a33ef36010c7a7743b |
version: '3.4'\n\nservices:\n sqlserver:\n image: mcr.microsoft.com/mssql/server\n volumes:\n - dbdata:/var/opt/mssql\n\n volo-docs:\n image: 'volosoft/volo-docs:${TAG:-latest}'\n build:\n context: ../../\n dockerfile: modules/docs/app/VoloDocs.Web/Dockerfile\n depends_on: \n - sqlserver\n\nvolumes:\n dbdata: | dataset_sample\yaml\abpframework_abp\modules\docs\docker-compose.yml | docker-compose.yml | YAML | 331 | 0.7 | 0 | 0 | python-kit | 90 | 2025-02-17T06:35:35.720180 | MIT | false | 4a573efdc21aac5917da094a951333f9 |
version: '3.9'\n\nservices:\n verdaccio:\n image: verdaccio/verdaccio:4.0\n container_name: 'verdaccio'\n networks:\n - docker_network\n environment:\n - VERDACCIO_PORT=4873\n ports:\n - '4873:4873'\n expose:\n - '4873/tcp'\n publish:\n build:\n context: ./publish-packages\n dockerfile: Dockerfile\n args:\n next_version: ''\n container_name: 'verdaccio_publish'\n networks:\n - docker_network\n depends_on:\n - verdaccio\n expose:\n - '4872'\n app:\n build: ./serve-app\n container_name: 'verdaccio_app'\n networks:\n - docker_network\n depends_on:\n - publish\n ports:\n - '4200:4200'\n expose:\n - '4200/tcp'\nnetworks:\n docker_network:\n driver: bridge\n | dataset_sample\yaml\abpframework_abp\npm\verdaccio-containers\docker-compose.yml | docker-compose.yml | YAML | 754 | 0.7 | 0 | 0 | python-kit | 910 | 2024-10-01T22:54:58.404120 | MIT | false | 15909c76e65594b549693ee098353a5b |
version: '3.4'\n\nservices:\n migrations:\n build:\n context: ../../\n dockerfile: templates/service/database/Dockerfile\n depends_on:\n - sqlserver\n environment:\n - IdentityServer_DB=MyProjectName_Identity\n - MyProjectName_DB=MyProjectName_ModuleDb\n - SA_PASSWORD=yourStrong(!)Password\n | dataset_sample\yaml\abpframework_abp\templates\module\aspnet-core\docker-compose.migrations.yml | docker-compose.migrations.yml | YAML | 319 | 0.7 | 0 | 0 | node-utils | 342 | 2024-06-23T16:46:23.003085 | MIT | false | 8a19b097258f99c7d296a4c76f4faf7e |
version: '3.4'\n\nservices:\n sqlserver:\n environment:\n - SA_PASSWORD=yourStrong(!)Password\n - ACCEPT_EULA=Y\n ports:\n - "51599:1433"\n\n identity-server:\n environment:\n - ASPNETCORE_URLS=http://0.0.0.0:80\n - ConnectionStrings__Default=Server=sqlserver;Database=MyProjectName_Identity;Trusted_Connection=True;User=sa;Password=yourStrong(!)Password;Integrated Security=false\n - ConnectionStrings__SqlServerCache=Server=sqlserver;Database=MyProjectName_Cache;Trusted_Connection=True;User=sa;Password=yourStrong(!)Password;Integrated Security=false\n ports:\n - "51600:80"\n\n my-project-name:\n environment:\n - ASPNETCORE_URLS=http://0.0.0.0:80\n - ConnectionStrings__Default=Server=sqlserver;Database=MyProjectName_ModuleDb;Trusted_Connection=True;User=sa;Password=yourStrong(!)Password;Integrated Security=false\n - ConnectionStrings__AbpSettingManagement=Server=sqlserver;Database=MyProjectName_Identity;Trusted_Connection=True;User=sa;Password=yourStrong(!)Password;Integrated Security=false\n - ConnectionStrings__AbpPermissionManagement=Server=sqlserver;Database=MyProjectName_Identity;Trusted_Connection=True;User=sa;Password=yourStrong(!)Password;Integrated Security=false\n - ConnectionStrings__AbpAuditLogging=Server=sqlserver;Database=MyProjectName_Identity;Trusted_Connection=True;User=sa;Password=yourStrong(!)Password;Integrated Security=false\n - ConnectionStrings__SqlServerCache=Server=sqlserver;Database=MyProjectName_Cache;Trusted_Connection=True;User=sa;Password=yourStrong(!)Password;Integrated Security=false\n - AuthServer__Authority=http://identity-server\n ports:\n - "51601:80" | dataset_sample\yaml\abpframework_abp\templates\module\aspnet-core\docker-compose.override.yml | docker-compose.override.yml | YAML | 1,678 | 0.8 | 0 | 0 | node-utils | 797 | 2024-06-01T02:01:18.634992 | Apache-2.0 | false | b8e9c7571c8e660e4bb6972d643bfba8 |
version: '3.4'\n\nservices:\n sqlserver:\n image: mcr.microsoft.com/mssql/server\n volumes:\n - dbdata:/var/opt/mssql\n \n identity-server:\n build:\n context: ../../\n dockerfile: templates/service/host/IdentityServerHost/Dockerfile\n depends_on:\n - sqlserver \n \n my-project-name:\n build:\n context: ../../\n dockerfile: templates/service/host/MyCompanyName.MyProjectName.Host/Dockerfile\n depends_on:\n - sqlserver\n - identity-server\n\nvolumes:\n dbdata: | dataset_sample\yaml\abpframework_abp\templates\module\aspnet-core\docker-compose.yml | docker-compose.yml | YAML | 507 | 0.7 | 0 | 0 | python-kit | 443 | 2024-05-31T21:46:48.031782 | GPL-3.0 | false | 7cbeb6021bd4bd6711a8507413737e90 |
name: Bug report\ndescription: File a bug report related to Akaunting\nbody:\n - type: markdown\n attributes:\n value: |\n Thanks for taking the time to fill out this bug report!\n - type: input\n id: akaunting\n attributes:\n label: Akaunting version\n description: "Please provide the full Akaunting version of your installation."\n placeholder: "2.1.20"\n validations:\n required: true\n - type: input\n id: php\n attributes:\n label: PHP version\n description: "Please provide the full PHP version that is powering Akaunting."\n placeholder: "7.4.10"\n validations:\n required: true\n - type: input\n id: os\n attributes:\n label: Operating system\n description: "Which operating system do you use? Please provide the version as well."\n placeholder: "Ubuntu 20.04"\n validations:\n required: true\n - type: textarea\n id: steps\n attributes:\n label: Steps to reproduce\n description: Which steps do we need to take to reproduce this error?\n validations:\n required: true\n - type: textarea\n id: expected\n attributes:\n label: Expected result\n description: What is the expected result?\n validations:\n required: true\n - type: textarea\n id: actual\n attributes:\n label: Actual result\n description: What is the actual result?\n validations:\n required: true\n - type: textarea\n id: comments\n attributes:\n label: Additional comments\n description: Anything else we should know about?\n - type: textarea\n id: logs\n attributes:\n label: Relevant log output\n description: Copy and paste any relevant log output. No need for backticks.\n render: shell\n | dataset_sample\yaml\akaunting_akaunting\.github\ISSUE_TEMPLATE\bug.yml | bug.yml | YAML | 1,723 | 0.85 | 0.031746 | 0 | python-kit | 764 | 2024-02-06T07:46:01.129690 | BSD-3-Clause | false | f94400948812ce5e94b26bdb61c9f854 |
blank_issues_enabled: false\ncontact_links:\n - name: Feature request\n url: https://akaunting.com/forum\n about: 'For ideas or feature requests, start a new discussion'\n - name: Support, help, and other\n url: https://akaunting.com/support\n about: 'This repository is only for reporting bugs'\n - name: Documentation\n url: https://github.com/akaunting/docs\n about: For documentation improvements, open a pull request at the akaunting/docs repository\n | dataset_sample\yaml\akaunting_akaunting\.github\ISSUE_TEMPLATE\config.yml | config.yml | YAML | 466 | 0.8 | 0.090909 | 0 | vue-tools | 113 | 2025-01-14T22:57:49.344879 | BSD-3-Clause | false | 44d5d29bed6683eb6a4dda7129abcc74 |
name: Translations\n\non:\n schedule:\n - cron: '0 0 * * *'\n workflow_dispatch:\n\njobs:\n sync:\n name: Sync\n\n runs-on: ubuntu-latest\n\n steps:\n\n - name: Checkout\n uses: actions/checkout@v4\n\n - name: Sync with Crowdin\n uses: crowdin/github-action@master\n with:\n upload_sources: true\n upload_translations: true\n download_translations: true\n skip_untranslated_files: true\n\n source: 'resources/lang/en-GB/*.php'\n translation: 'resources/lang/%locale%/%original_file_name%'\n\n localization_branch_name: 'translations'\n commit_message: 'new crowdin translations'\n pull_request_title: 'New Crowdin translations'\n pull_request_body: 'https://crowdin.com/project/akaunting'\n pull_request_labels: 'Translation'\n \n project_id: ${{ secrets.CROWDIN_CORE_ID }}\n token: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}\n env:\n GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n | dataset_sample\yaml\akaunting_akaunting\.github\workflows\translations.yml | translations.yml | YAML | 980 | 0.8 | 0 | 0 | react-lib | 388 | 2025-01-18T15:03:17.314506 | Apache-2.0 | false | 15ea5b9ff3ab1412a6bb25a8fc8f535e |
# These are supported funding model platforms\n\ncustom: ['https://amethyst.rs/donate']\nopen_collective: amethyst\n | dataset_sample\yaml\amethyst_amethyst\.github\FUNDING.yml | FUNDING.yml | YAML | 112 | 0.8 | 0 | 0.333333 | react-lib | 77 | 2024-11-18T11:11:54.055738 | GPL-3.0 | false | 5edbd03b88464487c50058616301eb6a |
# Number of days of inactivity before an Issue or Pull Request becomes stale\ndaysUntilStale: 180\n\n# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.\n# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.\ndaysUntilClose: false\n\n# Only issues or pull requests with all of these labels are check if stale. Defaults to `[]` (disabled)\nonlyLabels: []\n\n# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable\nexemptLabels:\n - "status: waiting for merge"\n\n# Set to true to ignore issues in a project (defaults to false)\nexemptProjects: false\n\n# Set to true to ignore issues in a milestone (defaults to false)\nexemptMilestones: false\n\n# Set to true to ignore issues with an assignee (defaults to false)\nexemptAssignees: false\n\n# Label to use when marking as stale\nstaleLabel: stale\n\n# Comment to post when marking an issue as stale. Set to `false` to disable\nmarkComment: >\n This issue has been automatically marked as stale because it has not had\n recent activity. Maybe it's time to revisit this?\n\n# Comment to post when removing the stale label.\n# unmarkComment: >\n# Your comment here.\n\n# Comment to post when closing a stale Issue or Pull Request.\n# closeComment: >\n# Your comment here.\n\n# Limit the number of actions per hour, from 1-30. Default is 30\nlimitPerRun: 30\n\n# Limit to only `issues` or `pulls`\n# only: issues\n\n# Optionally, specify configuration settings that are specific to just 'issues' or 'pulls':\n# pulls:\n# daysUntilStale: 30\n# markComment: >\n# This pull request has been automatically marked as stale because it has not had\n# recent activity. It will be closed if no further activity occurs. 
Thank you\n# for your contributions.\n\n# issues:\n# exemptLabels:\n# - confirmed\n | dataset_sample\yaml\amethyst_amethyst\.github\stale.yml | stale.yml | YAML | 1,860 | 0.8 | 0.071429 | 0.690476 | react-lib | 544 | 2024-02-04T09:47:03.086171 | GPL-3.0 | false | 8b811e2d5b9dc88046bad27719474bf9 |
name: CI\n\non:\n push:\n branches:\n - main\n # bors needs CI to trigger for pushes to its staging/trying branches\n - staging\n - trying\n pull_request:\n\nenv:\n RUSTFLAGS: -Cdebuginfo=0\n CARGO_TERM_COLOR: always\n CARGO_INCREMENTAL: 0\n RUST_BACKTRACE: 1\n\njobs:\n tests:\n name: Tests\n runs-on: ${{ matrix.os }}\n continue-on-error: ${{ matrix.toolchain == 'nightly' }}\n strategy:\n fail-fast: true\n matrix:\n os: [macos-latest, windows-latest, ubuntu-18.04]\n toolchain: [stable, beta, nightly]\n steps:\n - uses: actions/checkout@v2\n\n - name: install linux deps\n run: |\n sudo apt update\n sudo apt install gcc pkg-config openssl libasound2-dev cmake build-essential python3 libfreetype6-dev libexpat1-dev libxcb-composite0-dev libssl-dev libx11-dev pulseaudio libxkbcommon-x11-0 libxkbcommon-dev\n if: contains(matrix.os, 'ubuntu')\n\n - name: install ${{ matrix.toolchain }} toolchain\n id: install_toolchain\n uses: actions-rs/toolchain@v1\n with:\n toolchain: ${{ matrix.toolchain }}\n profile: minimal\n override: true\n\n - uses: seanmiddleditch/gha-setup-ninja@master\n if: matrix.os == 'windows-latest'\n\n - run: rustup component add rustfmt\n if: matrix.toolchain == 'nightly' && matrix.os == 'ubuntu-latest'\n\n - run: cargo +nightly fmt --all -- --check\n if: matrix.toolchain == 'nightly' && matrix.os == 'ubuntu-latest'\n\n - run: rustup component add clippy\n if: matrix.toolchain == 'nightly'\n\n - run: cargo +nightly clippy -Z unstable-options --workspace --all-targets --all-features\n if: matrix.toolchain == 'nightly'\n\n - name: clean clippy-generated amethyst libs\n # Remove the clippy-generated amethyst files.\n # They mess up `mdbook test` later on for some reason\n run: rm -rf ./target/debug/deps/libamethyst*\n if: matrix.toolchain == 'stable' && matrix.os == 'ubuntu-latest'\n\n - run: cargo test --workspace --all-features --all-targets\n\n - run: cargo test --workspace --all-features --doc\n continue-on-error: true\n\n - uses: 
peaceiris/actions-mdbook@v1\n with:\n mdbook-version: 'latest'\n if: matrix.toolchain == 'stable' && matrix.os == 'ubuntu-latest'\n\n - run: cargo install mdbook-linkcheck\n if: matrix.toolchain == 'stable' && matrix.os == 'ubuntu-latest'\n\n - run: mdbook build book\n if: matrix.toolchain == 'stable' && matrix.os == 'ubuntu-latest'\n continue-on-error: true\n\n - run: mdbook test -L ./target/debug/deps book\n if: matrix.toolchain == 'stable' && matrix.os == 'ubuntu-latest'\n continue-on-error: true\n | dataset_sample\yaml\amethyst_amethyst\.github\workflows\ci.yml | ci.yml | YAML | 2,750 | 0.8 | 0.152941 | 0.044118 | vue-tools | 618 | 2024-03-29T16:24:05.155258 | BSD-3-Clause | false | 402dca28a0507d5b340aa333337cbdf9 |
name: cargo-deny\n\non: [pull_request]\n\nenv:\n CARGO_TERM_COLOR: always\n\njobs:\n cargo-deny:\n runs-on: ubuntu-latest\n strategy:\n matrix:\n checks:\n - advisories\n - bans licenses sources\n\n # Prevent sudden announcement of a new advisory from failing ci:\n continue-on-error: ${{ matrix.checks == 'advisories' }}\n\n steps:\n - uses: actions/checkout@v2\n - uses: EmbarkStudios/cargo-deny-action@v1\n with:\n command: check ${{ matrix.checks }} | dataset_sample\yaml\amethyst_amethyst\.github\workflows\deny.yml | deny.yml | YAML | 495 | 0.8 | 0 | 0.052632 | vue-tools | 417 | 2025-01-08T23:28:36.165548 | BSD-3-Clause | false | dbc80ba077da74e0b17ca3fca790cd06 |
####################################################################################\n####################################################################################\n# Angular Code Ownership #\n####################################################################################\n####################################################################################\n#\n# Configuration of code ownership and review approvals for the angular/angular repo.\n#\n# More info: https://docs.pullapprove.com/\n#\n# =========================================================\n# General rules / philosophy\n# =========================================================\n#\n# - We trust that people do the right thing and won't approve changes they don't feel confident reviewing\n# - We enforce that only approved PRs are merged ensuring that unreviewed code isn't accidentally merged\n# - We distribute approval rights as much as possible to help us scale better\n# - Groups have one or two global approvers groups as fallbacks:\n# - @angular/fw-global-approvers: for approving minor changes, large-scale refactorings, and emergency situations.\n# - @angular/fw-global-approvers-for-docs-only-changes: for approving minor documentation-only changes that don't require engineering review\n# - A small number of file groups have very limited number of reviewers because incorrect changes to the files they guard would have serious consequences (e.g. 
security, public api)\n#\n# Configuration nuances:\n#\n# - This configuration works in conjunction with the protected branch settings that require all changes to be made via pull requests with at least one approval.\n# - This approval can come from an appropriate codeowner, or any repo collaborator (person with write access) if the PR is authored by a codeowner.\n# - All groups whose pullapprove rules are matched will be required for overall approval.\n#\n# NOTE:\n# In the case of emergency, the repo administrators which include the current angular caretaker\n# can bypass this reviewer approval requirement, this is expected as a last resort and to be\n# done exceedingly sparingly.\n\n####################################################################################\n# GitHub usernames\n####################################################################################\n# See reviewer list under `required-minimum-review` group. Team member names and\n# usernames are managed there.\n\n####################################################################################\n# Approval Groups\n####################################################################################\n# =========================================================\n# @angular/framework-global-approvers\n# =========================================================\n# Used for approving minor changes, large-scale refactorings, and in emergency situations.\n#\n# alxhub\n# jelbourn\n# josephperrott\n#\n# =========================================================\n# @angular/framework-global-approvers-for-docs-only-changes\n# =========================================================\n# Used for approving minor documentation-only changes that don't require engineering review.\n\nversion: 3\n\n# availability:\n# users_unavailable: ['atscott']\n\n# Meta field that goes unused by PullApprove to allow for defining aliases to be\n# used throughout the config.\nmeta:\n # Note: Because all inactive groups start 
as pending, we are only checking pending and rejected active groups.\n no-groups-above-this-pending: &no-groups-above-this-pending len(groups.active.pending.exclude("required-minimum-review")) == 0\n no-groups-above-this-rejected: &no-groups-above-this-rejected len(groups.active.rejected.exclude("required-minimum-review")) == 0\n\n defaults: &defaults\n reviews:\n # Authors provide their approval implicitly, this approval allows for a reviewer\n # from a group not to need a review specifically for an area of the repository\n # they own. This is coupled with the `required-minimum-review` group which requires\n # that all PRs are reviewed by at least one team member who is not the author of\n # the PR.\n author_value: 1\n\n# turn on 'draft' support\n# https://docs.pullapprove.com/config/github-api-version/\n# https://developer.github.com/v3/previews/#draft-pull-requests\ngithub_api_version: 'shadow-cat-preview'\n\n# https://docs.pullapprove.com/config/overrides/\n# Note that overrides are processed in order.\noverrides:\n # For PRs which are still being worked on, either still in draft mode or indicated through WIP in\n # title or label, PullApprove stays in a pending state until its ready for review.\n - if: "draft or 'WIP' in title or 'PR state: WIP' in labels"\n status: pending\n explanation: 'Waiting to send reviews as PR is WIP'\n # Disable PullApprove on specific PRs by adding the `PullApprove: disable` label\n - if: "'PullApprove: disable' in labels"\n status: success\n explanation: "PullApprove skipped because of 'PullApprove: disable' label"\n # If no file matching based groups are active, report this pull request as failing. Most likely,\n # the PR author would need to update the PullApprove config, or create new group.\n - if: len(groups.active.exclude("required-minimum-review").exclude("global-*")) == 0 and len(groups.approved.include("global-*")) == 0\n status: failure\n explanation: 'At least one group must match this PR. 
Please update an existing review group, or create a new group.'\n # If any global dev-infra approval is given the status should be passing.\n - if: len(groups.approved.include("global-dev-infra-approvers")) == 1\n status: success\n explanation: 'Passing as globally approved by dev-infra'\n # If any global docs approval is given the status should be passing.\n - if: len(groups.approved.include("global-docs-approvers")) == 1\n status: success\n explanation: 'Passing as globally approved by docs'\n # If any global approval is given the status should be passing.\n - if: len(groups.approved.include("global-approvers")) == 1\n status: success\n explanation: 'Passing as globally approved by global approvers'\n\ngroups:\n # =========================================================\n # Framework: Compiler\n # =========================================================\n fw-compiler:\n <<: *defaults\n conditions:\n - >\n contains_any_globs(files, [\n 'packages/compiler/**/{*,.*}',\n 'packages/examples/compiler/**/{*,.*}',\n 'packages/compiler-cli/**/{*,.*}',\n 'packages/language-service/**/{*,.*}',\n ])\n reviewers:\n users:\n - alxhub\n - AndrewKushnir\n - atscott\n - crisbeto\n - devversion\n - kirjs\n - JoostK\n - mmalerba\n\n # =========================================================\n # Framework: General (most code in our packages)\n # =========================================================\n fw-general:\n <<: *defaults\n conditions:\n - >\n contains_any_globs(files.exclude('packages/core/primitives/*'), [\n 'contributing-docs/public-api-surface.md',\n 'integration/**/{*,.*}',\n 'modules/**/{*,.*}',\n 'packages/animations/**/{*,.*}',\n 'packages/benchpress/**/{*,.*}',\n 'packages/common/**/{*,.*}',\n 'packages/core/**/{*,.*}',\n 'packages/docs/**/{*,.*}',\n 'packages/elements/**/{*,.*}',\n 'packages/examples/**/{*,.*}',\n 'packages/forms/**/{*,.*}',\n 'packages/localize/**/{*,.*}',\n 'packages/misc/**/{*,.*}',\n 'packages/platform-browser/**/{*,.*}',\n 
'packages/platform-browser-dynamic/**/{*,.*}',\n 'packages/platform-server/**/{*,.*}',\n 'packages/ssr/**/{*,.*}',\n 'packages/router/**/{*,.*}',\n 'packages/service-worker/**/{*,.*}',\n 'packages/upgrade/**/{*,.*}',\n ])\n reviewers:\n users:\n - alxhub\n - AndrewKushnir\n - atscott\n - crisbeto\n - devversion\n - kirjs\n - ~jelbourn\n - thePunderWoman\n - pkozlowski-opensource\n - mmalerba\n\n # =========================================================\n # Framework: Security-sensitive files which require extra review\n # =========================================================\n fw-security:\n <<: *defaults\n conditions:\n - >\n contains_any_globs(files, [\n 'packages/core/src/sanitization/**/{*,.*}',\n 'packages/core/test/linker/security_integration_spec.ts',\n 'packages/compiler/src/schema/**/{*,.*}',\n 'packages/platform-browser/src/security/**/{*,.*}',\n 'packages/tsconfig-tsec-base.json',\n 'packages/tsec-exemption.json',\n 'tools/tsec.bzl',\n 'adev/src/content/guide/security.md',\n 'adev/src/content/examples/security/**/{*,.*}',\n ])\n reviewers:\n users:\n - alxhub\n - jelbourn\n - josephperrott\n - pkozlowski-opensource\n reviews:\n request: -1 # request reviews from everyone\n required: 2 # require at least 2 approvals\n reviewed_for: required\n\n # =========================================================\n # Bazel\n # =========================================================\n bazel:\n <<: *defaults\n conditions:\n - >\n contains_any_globs(files, [\n 'packages/bazel/**/{*,.*}',\n ])\n reviewers:\n users:\n - devversion\n - josephperrott\n\n # =========================================================\n # zone.js\n # =========================================================\n zone-js:\n <<: *defaults\n conditions:\n - >\n contains_any_globs(files.exclude('yarn.lock'), [\n 'packages/zone.js/**/{*,.*}',\n ])\n reviewers:\n users:\n - JiaLiPassion\n\n # =========================================================\n # Tooling: Compiler API shared with Angular 
CLI\n #\n # Changing this API might break Angular CLI, so we require\n # the CLI team to approve changes here.\n # =========================================================\n tooling-cli-shared-api:\n conditions:\n - >\n contains_any_globs(files, [\n 'packages/compiler-cli/private/tooling.ts',\n 'packages/localize/tools/index.ts'\n ])\n reviewers:\n users:\n - alan-agius4\n - clydin\n - dgp1130\n reviews:\n request: -1 # request reviews from everyone\n required: 2 # require at least 2 approvals\n reviewed_for: required\n\n # =========================================================\n # Documentation content\n # =========================================================\n angular-dev:\n <<: *defaults\n conditions:\n - >\n contains_any_globs(files, [\n 'adev/**/{*,.*}',\n ])\n reviewers:\n users:\n - alan-agius4\n - alxhub\n - AndrewKushnir\n - atscott\n - bencodezen\n - crisbeto\n - kirjs\n - ~JeanMeche\n - jelbourn\n - thePunderWoman\n - devversion\n - josephperrott\n - pkozlowski-opensource\n - mgechev\n - MarkTechson\n - kirjs\n - mmalerba\n - ~hawkgs\n\n # =========================================================\n # Angular DevTools\n # =========================================================\n devtools:\n <<: *defaults\n conditions:\n - >\n contains_any_globs(files, [\n 'devtools/**/{*,.*}',\n ])\n reviewers:\n users:\n - AleksanderBodurri\n - devversion\n - dgp1130\n - josephperrott\n - mgechev\n - MarkTechson\n - ~JeanMeche\n\n # =========================================================\n # Dev-infra\n # =========================================================\n dev-infra:\n <<: *defaults\n conditions:\n - >\n contains_any_globs(files.exclude('.pullapprove.yml'), [\n '{*,.*}',\n '.devcontainer/**/{*,.*}',\n '.github/**/{*,.*}',\n '.husky/**/{*,.*}',\n '.ng-dev/**/{*,.*}',\n '.vscode/**/{*,.*}',\n '.yarn/**/{*,.*}',\n 'contributing-docs/*.md',\n 'contributing-docs/images/**/{*,.*}',\n 'goldens/{*,.*}',\n 'goldens/public-api/manage.js',\n 'modules/{*,.*}',\n 
'packages/{*,.*}',\n 'packages/examples/test-utils/**/{*,.*}',\n 'packages/private/**/{*,.*}',\n 'packages/examples/{*,.*}',\n 'scripts/**/{*,.*}',\n 'third_party/**/{*,.*}',\n 'tools/bazel-repo-patches/**/{*,.*}',\n 'tools/circular_dependency_test/**/{*,.*}',\n 'tools/contributing-stats/**/{*,.*}',\n 'tools/esm-interop/**/{*,.*}',\n 'tools/gulp-tasks/**/{*,.*}',\n 'tools/legacy-saucelabs/**/{*,.*}',\n 'tools/manual_api_docs/**/{*,.*}',\n 'tools/npm-patches/**/{*,.*}',\n 'tools/rxjs/**/{*,.*}',\n 'tools/saucelabs-daemon/**/{*,.*}',\n 'tools/saucelabs/**/{*,.*}',\n 'tools/symbol-extractor/**/{*,.*}',\n 'tools/testing/**/{*,.*}',\n 'tools/tslint/**/{*,.*}',\n 'tools/utils/**/{*,.*}',\n 'tools/yarn/**/{*,.*}',\n 'tools/{*,.*}',\n '**/*.bzl'\n ])\n - author not in ["angular-robot"]\n reviewers:\n users:\n - devversion\n - josephperrott\n\n # =========================================================\n # Renovate Changes\n # =========================================================\n renovate-changes:\n <<: *defaults\n conditions:\n - author in ["angular-robot"]\n reviewers:\n users:\n - ~alan-agius4\n teams:\n - framework-team\n\n # =========================================================\n # Public API\n # =========================================================\n public-api:\n <<: *defaults\n conditions:\n - *no-groups-above-this-pending\n - *no-groups-above-this-rejected\n - >\n contains_any_globs(files.exclude("goldens/public-api/manage.js"), [\n 'goldens/public-api/**/{*,.*}',\n ])\n reviewers:\n users:\n - AndrewKushnir\n - alxhub\n - atscott\n - ~jelbourn\n - thePunderWoman\n - pkozlowski-opensource\n - kirjs\n - mmalerba\n - crisbeto\n - devversion\n - ~iteriani\n - ~tbondwilkinson\n - ~rahatarmanahmed\n reviews:\n request: 3 # Request reviews from 3 people\n required: 2 # Require that 2 people approve\n reviewed_for: required\n\n # ================================================\n # Size tracking\n # ================================================\n 
size-tracking:\n <<: *defaults\n conditions:\n - *no-groups-above-this-pending\n - *no-groups-above-this-rejected\n - >\n contains_any_globs(files, [\n 'goldens/size-tracking/**/{*,.*}'\n ])\n reviewers:\n users:\n - alxhub\n - AndrewKushnir\n - atscott\n - kirjs\n - ~jelbourn\n - thePunderWoman\n - pkozlowski-opensource\n - mmalerba\n reviews:\n request: 2 # Request reviews from 2 people\n required: 1 # Require that 1 person approve\n reviewed_for: required\n\n ####################################################################################\n # Special Cases\n ####################################################################################\n\n # =========================================================\n # Code Ownership\n # =========================================================\n code-ownership:\n <<: *defaults\n conditions:\n - >\n contains_any_globs(files, [\n '.pullapprove.yml'\n ])\n reviewers:\n users:\n - alxhub\n - AndrewKushnir\n - andrewseguin\n - dgp1130\n - ~jelbourn\n - thePunderWoman\n - josephperrott\n\n # =========================================================\n # Primitives\n # =========================================================\n\n # Angular team required reviews\n primitives:\n <<: *defaults\n conditions:\n - >\n contains_any_globs(files, [\n 'packages/core/primitives/**/{*,.*}',\n ])\n reviewers:\n users:\n - pkozlowski-opensource # Pawel Kozlowski\n - alxhub # Alex Rickabaugh\n - thePunderWoman # Jessica Janiuk\n - AndrewKushnir # Andrew Kushnir\n - atscott # Andrew Scott\n labels:\n pending: 'requires: TGP'\n approved: 'requires: TGP'\n rejected: 'requires: TGP'\n\n # External team required reviews\n primitives-shared:\n <<: *defaults\n conditions:\n - >\n contains_any_globs(files, [\n 'packages/core/primitives/**/{*,.*}',\n ])\n reviewers:\n users:\n - csmick # Cameron Smick\n - mturco # Matt Turco\n - iteriani # Thomas Nguyen\n - tbondwilkinson # Tom Wilkinson\n - rahatarmanahmed # Rahat Ahmed\n - ENAML # Ethan Cline\n 
labels:\n pending: 'requires: TGP'\n approved: 'requires: TGP'\n rejected: 'requires: TGP'\n\n ####################################################################################\n # Override managed result groups\n #\n # Groups which are only used to determine the value of an override are managed at\n # the bottom of the list as they will set a status on the PR directly, they\n # therefore can always be process last without concern.\n ####################################################################################\n\n # =========================================================\n # Global Approvers\n #\n # All reviews performed for global approvals require using\n # the `Reviewed-for:` specifier to set the approval\n # specificity as documented at:\n # https://docs.pullapprove.com/reviewed-for/\n # =========================================================\n global-approvers:\n type: optional\n reviewers:\n teams:\n - framework-global-approvers\n reviews:\n request: 0\n required: 1\n reviewed_for: required\n\n # =========================================================\n # Global Approvers For Docs\n #\n # All reviews performed for global docs approvals require\n # using the `Reviewed-for:` specifier to set the approval\n # specificity as documented at:\n # https://docs.pullapprove.com/reviewed-for/\n # =========================================================\n global-docs-approvers:\n type: optional\n reviewers:\n teams:\n - framework-global-approvers-for-docs-only-changes\n reviews:\n request: 0\n required: 1\n reviewed_for: required\n\n # =========================================================\n # Global Approvers For Dev-Infra changes\n #\n # All reviews performed for global dev-infra approvals\n # require using the `Reviewed-for:` specifier to set the\n # approval specificity as documented at:\n # https://docs.pullapprove.com/reviewed-for/\n # =========================================================\n global-dev-infra-approvers:\n type: optional\n 
reviewers:\n teams:\n - dev-infra-framework\n reviews:\n request: 0\n required: 1\n reviewed_for: required\n\n # =========================================================\n # Require review on all PRs\n #\n # All PRs require at least one review. This rule will not\n # request any reviewers, however will require that at least\n # one review is provided before the group is satisfied.\n # =========================================================\n required-minimum-review:\n reviews:\n request: 0 # Do not request any reviews from the reviewer group\n required: 1 # Require that all PRs have approval from at least one of the users in the group\n author_value: 0 # The author of the PR cannot provide an approval for themself\n reviewed_for: ignored # All reviews apply to this group whether noted via Reviewed-for or not\n reviewers:\n teams:\n # Any member of the team can provide a review to perform the minimum review required\n - team\n | dataset_sample\yaml\angular_angular\.pullapprove.yml | .pullapprove.yml | YAML | 20,213 | 0.95 | 0.053265 | 0.298913 | vue-tools | 470 | 2023-11-27T12:08:59.956709 | BSD-3-Clause | false | 49e2bfff14b68bded45998a27ba564cc |
# This configuration file was automatically generated by Gitpod.\n# Please adjust to your needs (see https://www.gitpod.io/docs/config-gitpod-file)\n# and commit this file to your remote git repository to share the goodness with others.\nimage: gitpod/workspace-dotnet\ntasks:\n - init: |\n npm install\n command: |\n dotnet build ./site/AntDesign.Docs.Build/AntDesign.Docs.Build.csproj\n npm run start\nports:\n - port: 5000\n visibility: public\n onOpen: open-browser\n - port: 5001\n visibility: public\n onOpen: ignore\n - port: 30000-60000\n visibility: public\n onOpen: ignore | dataset_sample\yaml\ant-design-blazor_ant-design-blazor\.gitpod.yml | .gitpod.yml | YAML | 603 | 0.8 | 0 | 0.15 | react-lib | 576 | 2024-10-21T03:08:07.559500 | Apache-2.0 | false | ff262e62e3668f307092aafbd8d16565 |
coverage:\n status:\n patch:\n default:\n informational: true\n project:\n default:\n informational: true\n\ncomment:\n require_changes: false\n require_head: false\n require_base: false\n layout: "condensed_header, diff, files"\n hide_project_coverage: false\n behavior: default\n\ngithub_checks:\n annotations: false\n | dataset_sample\yaml\antirez_redis\codecov.yml | codecov.yml | YAML | 339 | 0.85 | 0 | 0 | node-utils | 336 | 2024-10-19T02:17:51.108604 | BSD-3-Clause | false | f4f996e10e325b199d5268b85cae72db |
# To get started with Dependabot version updates, you'll need to specify which\n# package ecosystems to update and where the package manifests are located.\n# Please see the documentation for all configuration options:\n# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates\n\nversion: 2\nupdates:\n - package-ecosystem: github-actions\n directory: /\n schedule:\n interval: weekly\n - package-ecosystem: pip\n directory: /.codespell\n schedule:\n interval: weekly\n | dataset_sample\yaml\antirez_redis\.github\dependabot.yml | dependabot.yml | YAML | 526 | 0.8 | 0.133333 | 0.285714 | react-lib | 619 | 2025-04-06T19:47:22.982115 | GPL-3.0 | false | adbea082db7b10cc83cd292ca49e368e |
name: CI\n\non: [push, pull_request]\n\njobs:\n\n test-ubuntu-latest:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - name: make\n # Fail build if there are warnings\n # build with TLS just for compilation coverage\n run: make REDIS_CFLAGS='-Werror' BUILD_TLS=yes\n - name: test\n run: |\n sudo apt-get install tcl8.6 tclx\n ./runtest --verbose --tags -slow --dump-logs\n - name: module api test\n run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs\n - name: validate commands.def up to date\n run: |\n touch src/commands/ping.json\n make commands.def\n dirty=$(git diff)\n if [[ ! -z $dirty ]]; then echo $dirty; exit 1; fi\n\n test-sanitizer-address:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - name: make\n # build with TLS module just for compilation coverage\n run: make SANITIZER=address REDIS_CFLAGS='-Werror -DDEBUG_ASSERTIONS' BUILD_TLS=module\n - name: testprep\n run: sudo apt-get install tcl8.6 tclx -y\n - name: test\n run: ./runtest --verbose --tags -slow --dump-logs\n - name: module api test\n run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs\n\n build-debian-old:\n runs-on: ubuntu-latest\n container: debian:buster\n steps:\n - uses: actions/checkout@v4\n - name: make\n run: |\n apt-get update && apt-get install -y build-essential\n make REDIS_CFLAGS='-Werror'\n\n build-macos-latest:\n runs-on: macos-latest\n steps:\n - uses: actions/checkout@v4\n - name: make\n run: make REDIS_CFLAGS='-Werror'\n\n build-32bit:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - name: make\n run: |\n sudo apt-get update && sudo apt-get install libc6-dev-i386 gcc-multilib g++-multilib\n make REDIS_CFLAGS='-Werror' 32bit\n\n build-libc-malloc:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - name: make\n run: make REDIS_CFLAGS='-Werror' MALLOC=libc\n\n build-centos-jemalloc:\n runs-on: ubuntu-latest\n container: quay.io/centos/centos:stream9\n steps:\n - uses: 
actions/checkout@v4\n - name: make\n run: |\n dnf -y install which gcc gcc-c++ make\n make REDIS_CFLAGS='-Werror'\n\n build-old-chain-jemalloc:\n runs-on: ubuntu-latest\n container: ubuntu:20.04\n steps:\n - uses: actions/checkout@v4\n - name: make\n run: |\n apt-get update\n apt-get install -y gnupg2\n echo "deb http://dk.archive.ubuntu.com/ubuntu/ xenial main" >> /etc/apt/sources.list\n echo "deb http://dk.archive.ubuntu.com/ubuntu/ xenial universe" >> /etc/apt/sources.list\n apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 40976EAF437D05B5\n apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32\n apt-get update\n apt-get install -y make gcc-4.8 g++-4.8\n update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-4.8 100\n update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-4.8 100\n make CC=gcc REDIS_CFLAGS='-Werror'\n | dataset_sample\yaml\antirez_redis\.github\workflows\ci.yml | ci.yml | YAML | 3,147 | 0.95 | 0.058824 | 0.032609 | python-kit | 877 | 2025-04-17T06:37:34.522996 | GPL-3.0 | false | 9c10cce3f02375446acaf61a48b88d83 |
name: "Codecov"\n\n# Enabling on each push is to display the coverage changes in every PR, \n# where each PR needs to be compared against the coverage of the head commit\non: [push, pull_request]\n\njobs:\n code-coverage:\n runs-on: ubuntu-22.04\n\n steps:\n - name: Checkout repository\n uses: actions/checkout@v4\n\n - name: Install lcov and run test\n run: |\n sudo apt-get install lcov\n make lcov\n\n - name: Upload coverage reports to Codecov\n uses: codecov/codecov-action@v4\n with:\n token: ${{ secrets.CODECOV_TOKEN }}\n file: ./src/redis.info\n | dataset_sample\yaml\antirez_redis\.github\workflows\codecov.yml | codecov.yml | YAML | 595 | 0.8 | 0 | 0.105263 | react-lib | 84 | 2024-06-08T05:52:38.594674 | GPL-3.0 | false | 4b59b45b48ad8e603d7d3ad4fb64b63d |
name: "CodeQL"\n\non:\n pull_request:\n schedule:\n # run weekly new vulnerability was added to the database\n - cron: '0 0 * * 0'\n\njobs:\n analyze:\n name: Analyze\n runs-on: ubuntu-latest\n if: github.event_name != 'schedule' || github.repository == 'redis/redis'\n\n strategy:\n fail-fast: false\n matrix:\n language: [ 'cpp' ]\n\n steps:\n - name: Checkout repository\n uses: actions/checkout@v4\n\n - name: Initialize CodeQL\n uses: github/codeql-action/init@v3\n with:\n languages: ${{ matrix.language }}\n\n - name: Autobuild\n uses: github/codeql-action/autobuild@v3\n\n - name: Perform CodeQL Analysis\n uses: github/codeql-action/analyze@v3\n | dataset_sample\yaml\antirez_redis\.github\workflows\codeql-analysis.yml | codeql-analysis.yml | YAML | 706 | 0.8 | 0.030303 | 0.038462 | node-utils | 251 | 2025-04-06T09:19:18.163481 | BSD-3-Clause | false | 55568cb9cdf5a14fb2063c79ac997d74 |
# Creates and uploads a Coverity build on a schedule\nname: Coverity Scan\non:\n schedule:\n # Run once daily, since below 500k LOC can have 21 builds per week, per https://scan.coverity.com/faq#frequency\n - cron: '0 0 * * *'\n # Support manual execution\n workflow_dispatch:\njobs:\n coverity:\n if: github.repository == 'redis/redis'\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@main\n - name: Download and extract the Coverity Build Tool\n run: |\n wget -q https://scan.coverity.com/download/cxx/linux64 --post-data "token=${{ secrets.COVERITY_SCAN_TOKEN }}&project=redis-unstable" -O cov-analysis-linux64.tar.gz\n mkdir cov-analysis-linux64\n tar xzf cov-analysis-linux64.tar.gz --strip 1 -C cov-analysis-linux64\n - name: Install Redis dependencies\n run: sudo apt install -y gcc tcl8.6 tclx procps libssl-dev\n - name: Build with cov-build\n run: cov-analysis-linux64/bin/cov-build --dir cov-int make\n - name: Upload the result\n run: |\n tar czvf cov-int.tgz cov-int\n curl \\n --form project=redis-unstable \\n --form email=${{ secrets.COVERITY_SCAN_EMAIL }} \\n --form token=${{ secrets.COVERITY_SCAN_TOKEN }} \\n --form file=@cov-int.tgz \\n https://scan.coverity.com/builds\n | dataset_sample\yaml\antirez_redis\.github\workflows\coverity.yml | coverity.yml | YAML | 1,322 | 0.8 | 0.03125 | 0.09375 | vue-tools | 590 | 2023-08-06T02:35:42.714639 | GPL-3.0 | false | 8bd1b2c586074ba35d495f427d9281c4 |
name: Daily\n\non:\n pull_request:\n branches:\n # any PR to a release branch.\n - '[0-9].[0-9]'\n schedule:\n - cron: '0 0 * * *'\n workflow_dispatch:\n inputs:\n skipjobs:\n description: 'jobs to skip (delete the ones you wanna keep, do not leave empty)'\n default: 'valgrind,sanitizer,tls,freebsd,macos,alpine,32bit,iothreads,ubuntu,centos,malloc,specific,fortify,reply-schema,oldTC'\n skiptests:\n description: 'tests to skip (delete the ones you wanna keep, do not leave empty)'\n default: 'redis,modules,sentinel,cluster,unittest'\n test_args:\n description: 'extra test arguments'\n default: ''\n cluster_test_args:\n description: 'extra cluster / sentinel test arguments'\n default: ''\n use_repo:\n description: 'repo owner and name'\n default: 'redis/redis'\n use_git_ref:\n description: 'git branch or sha to use'\n default: 'unstable'\n\n\njobs:\n\n test-ubuntu-jemalloc:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'ubuntu')\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: make REDIS_CFLAGS='-Werror -DREDIS_TEST'\n - name: testprep\n run: sudo apt-get install tcl8.6 tclx\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: ./runtest --accurate --verbose --dump-logs ${{github.event.inputs.test_args}}\n - 
name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: sentinel tests\n if: true && !contains(github.event.inputs.skiptests, 'sentinel')\n run: ./runtest-sentinel ${{github.event.inputs.cluster_test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: ./runtest-cluster ${{github.event.inputs.cluster_test_args}}\n - name: unittest\n if: true && !contains(github.event.inputs.skiptests, 'unittest')\n run: ./src/redis-server test all --accurate\n\n test-ubuntu-jemalloc-fortify:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'fortify')\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: |\n apt-get update && apt-get install -y make gcc g++\n make CC=gcc REDIS_CFLAGS='-Werror -DREDIS_TEST -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=3'\n - name: testprep\n run: sudo apt-get install -y tcl8.6 tclx procps\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: ./runtest --accurate --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: 
CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: sentinel tests\n if: true && !contains(github.event.inputs.skiptests, 'sentinel')\n run: ./runtest-sentinel ${{github.event.inputs.cluster_test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: ./runtest-cluster ${{github.event.inputs.cluster_test_args}}\n - name: unittest\n if: true && !contains(github.event.inputs.skiptests, 'unittest')\n run: ./src/redis-server test all --accurate\n\n test-ubuntu-libc-malloc:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'malloc')\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: make MALLOC=libc REDIS_CFLAGS='-Werror'\n - name: testprep\n run: sudo apt-get install tcl8.6 tclx\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: ./runtest --accurate --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: sentinel tests\n if: true && !contains(github.event.inputs.skiptests, 'sentinel')\n run: ./runtest-sentinel 
${{github.event.inputs.cluster_test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: ./runtest-cluster ${{github.event.inputs.cluster_test_args}}\n\n test-ubuntu-no-malloc-usable-size:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'malloc')\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: make MALLOC=libc CFLAGS=-DNO_MALLOC_USABLE_SIZE REDIS_CFLAGS='-Werror'\n - name: testprep\n run: sudo apt-get install tcl8.6 tclx\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: ./runtest --accurate --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: sentinel tests\n if: true && !contains(github.event.inputs.skiptests, 'sentinel')\n run: ./runtest-sentinel ${{github.event.inputs.cluster_test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: ./runtest-cluster ${{github.event.inputs.cluster_test_args}}\n\n test-ubuntu-32bit:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || 
(github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, '32bit')\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: |\n sudo apt-get update && sudo apt-get install libc6-dev-i386 g++ gcc-multilib g++-multilib\n make 32bit REDIS_CFLAGS='-Werror -DREDIS_TEST'\n - name: testprep\n run: sudo apt-get install tcl8.6 tclx\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: ./runtest --accurate --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: |\n make -C tests/modules 32bit # the script below doesn't have an argument, we must build manually ahead of time\n CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: sentinel tests\n if: true && !contains(github.event.inputs.skiptests, 'sentinel')\n run: ./runtest-sentinel ${{github.event.inputs.cluster_test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: ./runtest-cluster ${{github.event.inputs.cluster_test_args}}\n - name: unittest\n if: true && !contains(github.event.inputs.skiptests, 'unittest')\n run: ./src/redis-server test all --accurate\n\n test-ubuntu-tls:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || 
(github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'tls')\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: |\n make BUILD_TLS=yes REDIS_CFLAGS='-Werror'\n - name: testprep\n run: |\n sudo apt-get install tcl8.6 tclx tcl-tls\n ./utils/gen-test-certs.sh\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: |\n ./runtest --accurate --verbose --dump-logs --tls --dump-logs ${{github.event.inputs.test_args}}\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: |\n CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs --tls --dump-logs ${{github.event.inputs.test_args}}\n - name: sentinel tests\n if: true && !contains(github.event.inputs.skiptests, 'sentinel')\n run: |\n ./runtest-sentinel --tls ${{github.event.inputs.cluster_test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: |\n ./runtest-cluster --tls ${{github.event.inputs.cluster_test_args}}\n\n test-ubuntu-tls-no-tls:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'tls')\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n 
echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: |\n make BUILD_TLS=yes REDIS_CFLAGS='-Werror'\n - name: testprep\n run: |\n sudo apt-get install tcl8.6 tclx tcl-tls\n ./utils/gen-test-certs.sh\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: |\n ./runtest --accurate --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: |\n CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: sentinel tests\n if: true && !contains(github.event.inputs.skiptests, 'sentinel')\n run: |\n ./runtest-sentinel ${{github.event.inputs.cluster_test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: |\n ./runtest-cluster ${{github.event.inputs.cluster_test_args}}\n\n test-ubuntu-io-threads:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'iothreads')\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: 
${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: |\n make REDIS_CFLAGS='-Werror'\n - name: testprep\n run: sudo apt-get install tcl8.6 tclx\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: ./runtest --config io-threads 4 --config io-threads-do-reads yes --accurate --verbose --tags network --dump-logs ${{github.event.inputs.test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: ./runtest-cluster --config io-threads 4 --config io-threads-do-reads yes ${{github.event.inputs.cluster_test_args}}\n\n test-ubuntu-reclaim-cache:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'specific')\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: |\n make REDIS_CFLAGS='-Werror'\n - name: testprep\n run: |\n sudo apt-get install vmtouch\n mkdir /tmp/master \n mkdir /tmp/slave\n - name: warm up\n run: |\n ./src/redis-server --daemonize yes --logfile /dev/null\n ./src/redis-benchmark -n 1 > /dev/null\n ./src/redis-cli save | grep OK > /dev/null\n vmtouch -v ./dump.rdb > /dev/null\n 
- name: test\n run: |\n echo "test SAVE doesn't increase cache"\n CACHE0=$(grep -w file /sys/fs/cgroup/memory.stat | awk '{print $2}')\n echo "$CACHE0"\n ./src/redis-server --daemonize yes --logfile /dev/null --dir /tmp/master --port 8080 --repl-diskless-sync no --pidfile /tmp/master/redis.pid --rdbcompression no --enable-debug-command yes\n ./src/redis-cli -p 8080 debug populate 10000 k 102400\n ./src/redis-server --daemonize yes --logfile /dev/null --dir /tmp/slave --port 8081 --repl-diskless-load disabled --rdbcompression no\n ./src/redis-cli -p 8080 save > /dev/null\n VMOUT=$(vmtouch -v /tmp/master/dump.rdb)\n echo $VMOUT\n grep -q " 0%" <<< $VMOUT \n CACHE=$(grep -w file /sys/fs/cgroup/memory.stat | awk '{print $2}')\n echo "$CACHE"\n if [ "$(( $CACHE-$CACHE0 ))" -gt "8000000" ]; then exit 1; fi\n\n echo "test replication doesn't increase cache"\n ./src/redis-cli -p 8081 REPLICAOF 127.0.0.1 8080 > /dev/null\n while [ $(./src/redis-cli -p 8081 info replication | grep "master_link_status:down") ]; do sleep 1; done;\n sleep 1 # wait for the completion of cache reclaim bio\n VMOUT=$(vmtouch -v /tmp/master/dump.rdb)\n echo $VMOUT\n grep -q " 0%" <<< $VMOUT \n VMOUT=$(vmtouch -v /tmp/slave/dump.rdb)\n echo $VMOUT\n grep -q " 0%" <<< $VMOUT \n CACHE=$(grep -w file /sys/fs/cgroup/memory.stat | awk '{print $2}')\n echo "$CACHE"\n if [ "$(( $CACHE-$CACHE0 ))" -gt "8000000" ]; then exit 1; fi\n \n echo "test reboot doesn't increase cache"\n PID=$(cat /tmp/master/redis.pid)\n kill -15 $PID\n while [ -x /proc/${PID} ]; do sleep 1; done\n ./src/redis-server --daemonize yes --logfile /dev/null --dir /tmp/master --port 8080\n while [ $(./src/redis-cli -p 8080 info persistence | grep "loading:1") ]; do sleep 1; done;\n sleep 1 # wait for the completion of cache reclaim bio\n VMOUT=$(vmtouch -v /tmp/master/dump.rdb)\n echo $VMOUT\n grep -q " 0%" <<< $VMOUT\n CACHE=$(grep -w file /sys/fs/cgroup/memory.stat | awk '{print $2}')\n echo "$CACHE"\n if [ "$(( $CACHE-$CACHE0 ))" -gt 
"8000000" ]; then exit 1; fi\n\n test-valgrind-test:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'valgrind') && !contains(github.event.inputs.skiptests, 'redis')\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: make valgrind REDIS_CFLAGS='-Werror -DREDIS_TEST'\n - name: testprep\n run: |\n sudo apt-get update\n sudo apt-get install tcl8.6 tclx valgrind g++ -y\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: ./runtest --valgrind --no-latency --verbose --clients 1 --timeout 2400 --dump-logs ${{github.event.inputs.test_args}}\n\n test-valgrind-misc:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'valgrind') && !(contains(github.event.inputs.skiptests, 'modules') && contains(github.event.inputs.skiptests, 'unittest'))\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: 
${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: make valgrind REDIS_CFLAGS='-Werror -DREDIS_TEST'\n - name: testprep\n run: |\n sudo apt-get update\n sudo apt-get install tcl8.6 tclx valgrind -y\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: CFLAGS='-Werror' ./runtest-moduleapi --valgrind --no-latency --verbose --clients 1 --timeout 2400 --dump-logs ${{github.event.inputs.test_args}}\n - name: unittest\n if: true && !contains(github.event.inputs.skiptests, 'unittest')\n run: |\n valgrind --track-origins=yes --suppressions=./src/valgrind.sup --show-reachable=no --show-possibly-lost=no --leak-check=full --log-file=err.txt ./src/redis-server test all --valgrind\n if grep -q 0x err.txt; then cat err.txt; exit 1; fi\n\n test-valgrind-no-malloc-usable-size-test:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'valgrind') && !contains(github.event.inputs.skiptests, 'redis')\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: make valgrind 
CFLAGS="-DNO_MALLOC_USABLE_SIZE -DREDIS_TEST" REDIS_CFLAGS='-Werror'\n - name: testprep\n run: |\n sudo apt-get update\n sudo apt-get install tcl8.6 tclx valgrind g++ -y\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: ./runtest --valgrind --no-latency --verbose --clients 1 --timeout 2400 --dump-logs ${{github.event.inputs.test_args}}\n\n test-valgrind-no-malloc-usable-size-misc:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'valgrind') && !(contains(github.event.inputs.skiptests, 'modules') && contains(github.event.inputs.skiptests, 'unittest'))\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: make valgrind CFLAGS="-DNO_MALLOC_USABLE_SIZE -DREDIS_TEST" REDIS_CFLAGS='-Werror'\n - name: testprep\n run: |\n sudo apt-get update\n sudo apt-get install tcl8.6 tclx valgrind -y\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: CFLAGS='-Werror' ./runtest-moduleapi --valgrind --no-latency --verbose --clients 1 --timeout 2400 --dump-logs ${{github.event.inputs.test_args}}\n - name: unittest\n if: true && !contains(github.event.inputs.skiptests, 'unittest')\n run: |\n valgrind --track-origins=yes --suppressions=./src/valgrind.sup --show-reachable=no 
--show-possibly-lost=no --leak-check=full --log-file=err.txt ./src/redis-server test all --valgrind\n if grep -q 0x err.txt; then cat err.txt; exit 1; fi\n\n test-sanitizer-address:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'sanitizer')\n timeout-minutes: 14400\n strategy:\n matrix:\n compiler: [ gcc, clang ]\n env:\n CC: ${{ matrix.compiler }}\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: make SANITIZER=address REDIS_CFLAGS='-DREDIS_TEST -Werror -DDEBUG_ASSERTIONS'\n - name: testprep\n run: |\n sudo apt-get update\n sudo apt-get install tcl8.6 tclx -y\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: ./runtest --accurate --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: sentinel tests\n if: true && !contains(github.event.inputs.skiptests, 'sentinel')\n run: ./runtest-sentinel ${{github.event.inputs.cluster_test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: ./runtest-cluster ${{github.event.inputs.cluster_test_args}}\n - name: unittest\n if: true && 
!contains(github.event.inputs.skiptests, 'unittest')\n run: ./src/redis-server test all\n\n test-sanitizer-undefined:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'sanitizer')\n timeout-minutes: 14400\n strategy:\n matrix:\n compiler: [ gcc, clang ]\n env:\n CC: ${{ matrix.compiler }}\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: make SANITIZER=undefined REDIS_CFLAGS='-DREDIS_TEST -Werror' SKIP_VEC_SETS=yes LUA_DEBUG=yes # we (ab)use this flow to also check Lua C API violations\n - name: testprep\n run: |\n sudo apt-get update\n sudo apt-get install tcl8.6 tclx -y\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: ./runtest --accurate --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: sentinel tests\n if: true && !contains(github.event.inputs.skiptests, 'sentinel')\n run: ./runtest-sentinel ${{github.event.inputs.cluster_test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: ./runtest-cluster ${{github.event.inputs.cluster_test_args}}\n - name: unittest\n if: true && 
!contains(github.event.inputs.skiptests, 'unittest')\n run: ./src/redis-server test all --accurate\n\n test-centos-jemalloc:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'centos')\n container: quay.io/centos/centos:stream9\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: |\n dnf -y install which gcc make g++\n make REDIS_CFLAGS='-Werror'\n - name: testprep\n run: |\n dnf -y install epel-release\n dnf -y install tcl tcltls procps-ng /usr/bin/kill\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: ./runtest --accurate --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: sentinel tests\n if: true && !contains(github.event.inputs.skiptests, 'sentinel')\n run: ./runtest-sentinel ${{github.event.inputs.cluster_test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: ./runtest-cluster ${{github.event.inputs.cluster_test_args}}\n\n test-centos-tls-module:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 
'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'tls')\n container: quay.io/centos/centos:stream9\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: |\n dnf -y install which gcc make openssl-devel openssl g++\n make BUILD_TLS=module REDIS_CFLAGS='-Werror'\n - name: testprep\n run: |\n dnf -y install epel-release\n dnf -y install tcl tcltls procps-ng /usr/bin/kill\n ./utils/gen-test-certs.sh\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: |\n ./runtest --accurate --verbose --dump-logs --tls-module ${{github.event.inputs.test_args}}\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: |\n CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs --tls-module ${{github.event.inputs.test_args}}\n - name: sentinel tests\n if: true && !contains(github.event.inputs.skiptests, 'sentinel')\n run: |\n ./runtest-sentinel ${{github.event.inputs.cluster_test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: |\n ./runtest-cluster --tls-module ${{github.event.inputs.cluster_test_args}}\n\n test-centos-tls-module-no-tls:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n 
!contains(github.event.inputs.skipjobs, 'tls')\n container: quay.io/centos/centos:stream9\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: |\n dnf -y install which gcc make openssl-devel openssl g++\n make BUILD_TLS=module REDIS_CFLAGS='-Werror'\n - name: testprep\n run: |\n dnf -y install epel-release\n dnf -y install tcl tcltls procps-ng /usr/bin/kill\n ./utils/gen-test-certs.sh\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: |\n ./runtest --accurate --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: |\n CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: sentinel tests\n if: true && !contains(github.event.inputs.skiptests, 'sentinel')\n run: |\n ./runtest-sentinel ${{github.event.inputs.cluster_test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: |\n ./runtest-cluster ${{github.event.inputs.cluster_test_args}}\n\n test-macos-latest:\n runs-on: macos-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'macos') && !(contains(github.event.inputs.skiptests, 'redis') && contains(github.event.inputs.skiptests, 
'modules'))\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: make REDIS_CFLAGS='-Werror'\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: ./runtest --accurate --verbose --clients 1 --no-latency --dump-logs ${{github.event.inputs.test_args}}\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --clients 1 --no-latency --dump-logs ${{github.event.inputs.test_args}}\n\n test-macos-latest-sentinel:\n runs-on: macos-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'macos') && !contains(github.event.inputs.skiptests, 'sentinel')\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: 
make\n run: make REDIS_CFLAGS='-Werror'\n - name: sentinel tests\n if: true && !contains(github.event.inputs.skiptests, 'sentinel')\n run: ./runtest-sentinel ${{github.event.inputs.cluster_test_args}}\n\n test-macos-latest-cluster:\n runs-on: macos-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'macos') && !contains(github.event.inputs.skiptests, 'cluster')\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: make REDIS_CFLAGS='-Werror'\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: ./runtest-cluster ${{github.event.inputs.cluster_test_args}}\n\n build-macos:\n strategy:\n matrix:\n os: [macos-13, macos-15]\n runs-on: ${{ matrix.os }}\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'macos')\n timeout-minutes: 14400\n steps:\n - uses: maxim-lobanov/setup-xcode@v1\n with:\n xcode-version: latest\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo 
"skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: make REDIS_CFLAGS='-Werror -DREDIS_TEST'\n\n test-freebsd:\n runs-on: macos-13\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'freebsd')\n timeout-minutes: 14400\n env:\n CC: clang\n CXX: clang++\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: test\n uses: cross-platform-actions/action@v0.22.0\n with:\n operating_system: freebsd\n environment_variables: MAKE\n version: 13.2\n shell: bash\n run: |\n sudo pkg install -y bash gmake lang/tcl86 lang/tclx gcc\n gmake\n ./runtest --single unit/keyspace --single unit/auth --single unit/networking --single unit/protocol\n\n test-alpine-jemalloc:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'alpine')\n container: alpine:latest\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo 
"cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: |\n apk add build-base\n make REDIS_CFLAGS='-Werror'\n - name: testprep\n run: apk add tcl procps tclx\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: ./runtest --accurate --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: sentinel tests\n if: true && !contains(github.event.inputs.skiptests, 'sentinel')\n run: ./runtest-sentinel ${{github.event.inputs.cluster_test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: ./runtest-cluster ${{github.event.inputs.cluster_test_args}}\n\n test-alpine-libc-malloc:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'alpine')\n container: alpine:latest\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: |\n apk add build-base\n make REDIS_CFLAGS='-Werror' USE_JEMALLOC=no CFLAGS=-DUSE_MALLOC_USABLE_SIZE\n - name: testprep\n run: apk add tcl 
procps tclx\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: ./runtest --accurate --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: sentinel tests\n if: true && !contains(github.event.inputs.skiptests, 'sentinel')\n run: ./runtest-sentinel ${{github.event.inputs.cluster_test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: ./runtest-cluster ${{github.event.inputs.cluster_test_args}}\n\n reply-schemas-validator:\n runs-on: ubuntu-latest\n timeout-minutes: 14400\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'reply-schema')\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: make REDIS_CFLAGS='-Werror -DLOG_REQ_RES'\n - name: testprep\n run: sudo apt-get install tcl8.6 tclx\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: ./runtest --log-req-res --no-latency --dont-clean --force-resp3 --tags -slow --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: 
CFLAGS='-Werror' ./runtest-moduleapi --log-req-res --no-latency --dont-clean --force-resp3 --dont-pre-clean --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: sentinel tests\n if: true && !contains(github.event.inputs.skiptests, 'sentinel')\n run: ./runtest-sentinel --log-req-res --dont-clean --force-resp3 ${{github.event.inputs.cluster_test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: ./runtest-cluster --log-req-res --dont-clean --force-resp3 ${{github.event.inputs.cluster_test_args}}\n - name: Install Python dependencies\n uses: py-actions/py-dependency-install@v4\n with:\n path: "./utils/req-res-validator/requirements.txt"\n - name: validator\n run: ./utils/req-res-log-validator.py --verbose --fail-missing-reply-schemas ${{ (!contains(github.event.inputs.skiptests, 'redis') && !contains(github.event.inputs.skiptests, 'modules') && !contains(github.event.inputs.skiptests, 'sentinel') && !contains(github.event.inputs.skiptests, 'cluster')) && github.event.inputs.test_args == '' && github.event.inputs.cluster_test_args == '' && '--fail-commands-not-all-hit' || '' }}\n\n test-old-chain-jemalloc:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'oldTC')\n container: ubuntu:20.04\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n 
ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: |\n apt-get update\n apt-get install -y gnupg2\n echo "deb http://dk.archive.ubuntu.com/ubuntu/ xenial main" >> /etc/apt/sources.list\n echo "deb http://dk.archive.ubuntu.com/ubuntu/ xenial universe" >> /etc/apt/sources.list\n apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 40976EAF437D05B5\n apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32\n apt-get update\n apt-get install -y make gcc-4.8 g++-4.8\n update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-4.8 100\n update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-4.8 100\n make CC=gcc REDIS_CFLAGS='-Werror'\n - name: testprep\n run: apt-get install -y tcl tcltls tclx\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: ./runtest --accurate --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: sentinel tests\n if: true && !contains(github.event.inputs.skiptests, 'sentinel')\n run: ./runtest-sentinel ${{github.event.inputs.cluster_test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: ./runtest-cluster ${{github.event.inputs.cluster_test_args}}\n\n test-old-chain-tls-module:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || (github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'tls') && !contains(github.event.inputs.skipjobs, 'oldTC')\n container: ubuntu:20.04\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo 
"skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: |\n apt-get update\n apt-get install -y gnupg2\n echo "deb http://dk.archive.ubuntu.com/ubuntu/ xenial main" >> /etc/apt/sources.list\n echo "deb http://dk.archive.ubuntu.com/ubuntu/ xenial universe" >> /etc/apt/sources.list\n apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 40976EAF437D05B5\n apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32\n apt-get update\n apt-get install -y make gcc-4.8 g++-4.8 openssl libssl-dev\n update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-4.8 100\n update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-4.8 100\n make CC=gcc CXX=g++ BUILD_TLS=module REDIS_CFLAGS='-Werror'\n - name: testprep\n run: |\n apt-get install -y tcl tcltls tclx\n ./utils/gen-test-certs.sh\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: |\n ./runtest --accurate --verbose --dump-logs --tls-module --dump-logs ${{github.event.inputs.test_args}}\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: |\n CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs --tls-module --dump-logs ${{github.event.inputs.test_args}}\n - name: sentinel tests\n if: true && !contains(github.event.inputs.skiptests, 'sentinel')\n run: |\n ./runtest-sentinel ${{github.event.inputs.cluster_test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: |\n ./runtest-cluster --tls-module ${{github.event.inputs.cluster_test_args}}\n\n test-old-chain-tls-module-no-tls:\n runs-on: ubuntu-latest\n if: |\n (github.event_name == 'workflow_dispatch' || 
(github.event_name != 'workflow_dispatch' && github.repository == 'redis/redis')) &&\n !contains(github.event.inputs.skipjobs, 'tls') && !contains(github.event.inputs.skipjobs, 'oldTC')\n container: ubuntu:20.04\n timeout-minutes: 14400\n steps:\n - name: prep\n if: github.event_name == 'workflow_dispatch'\n run: |\n echo "GITHUB_REPOSITORY=${{github.event.inputs.use_repo}}" >> $GITHUB_ENV\n echo "GITHUB_HEAD_REF=${{github.event.inputs.use_git_ref}}" >> $GITHUB_ENV\n echo "skipjobs: ${{github.event.inputs.skipjobs}}"\n echo "skiptests: ${{github.event.inputs.skiptests}}"\n echo "test_args: ${{github.event.inputs.test_args}}"\n echo "cluster_test_args: ${{github.event.inputs.cluster_test_args}}"\n - uses: actions/checkout@v4\n with:\n repository: ${{ env.GITHUB_REPOSITORY }}\n ref: ${{ env.GITHUB_HEAD_REF }}\n - name: make\n run: |\n apt-get update\n apt-get install -y gnupg2 \n echo "deb http://dk.archive.ubuntu.com/ubuntu/ xenial main" >> /etc/apt/sources.list\n echo "deb http://dk.archive.ubuntu.com/ubuntu/ xenial universe" >> /etc/apt/sources.list\n apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 40976EAF437D05B5\n apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32\n apt-get update\n apt-get install -y make gcc-4.8 g++-4.8 openssl libssl-dev\n update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-4.8 100\n update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-4.8 100\n make BUILD_TLS=module CC=gcc REDIS_CFLAGS='-Werror'\n - name: testprep\n run: |\n apt-get install -y tcl tcltls tclx\n ./utils/gen-test-certs.sh\n - name: test\n if: true && !contains(github.event.inputs.skiptests, 'redis')\n run: |\n ./runtest --accurate --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: module api test\n if: true && !contains(github.event.inputs.skiptests, 'modules')\n run: |\n CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs ${{github.event.inputs.test_args}}\n - name: sentinel tests\n if: true && 
!contains(github.event.inputs.skiptests, 'sentinel')\n run: |\n ./runtest-sentinel ${{github.event.inputs.cluster_test_args}}\n - name: cluster tests\n if: true && !contains(github.event.inputs.skiptests, 'cluster')\n run: |\n ./runtest-cluster ${{github.event.inputs.cluster_test_args}}\n | dataset_sample\yaml\antirez_redis\.github\workflows\daily.yml | daily.yml | YAML | 56,945 | 0.75 | 0.129645 | 0.00085 | node-utils | 88 | 2023-12-31T09:05:50.641562 | Apache-2.0 | false | b42d4bf3553cf543b04655cab0146382 |
name: External Server Tests\n\non:\n pull_request:\n push:\n schedule:\n - cron: '0 0 * * *'\n\njobs:\n test-external-standalone:\n runs-on: ubuntu-latest\n if: github.event_name != 'schedule' || github.repository == 'redis/redis'\n timeout-minutes: 14400\n steps:\n - uses: actions/checkout@v4\n - name: Build\n run: make REDIS_CFLAGS=-Werror\n - name: Start redis-server\n run: |\n ./src/redis-server --daemonize yes --save "" --logfile external-redis.log \\n --enable-protected-configs yes --enable-debug-command yes --enable-module-command yes\n - name: Run external test\n run: |\n ./runtest \\n --host 127.0.0.1 --port 6379 \\n --verbose \\n --tags -slow\n - name: Archive redis log\n if: ${{ failure() }}\n uses: actions/upload-artifact@v4\n with:\n name: test-external-redis-log\n path: external-redis.log\n\n test-external-cluster:\n runs-on: ubuntu-latest\n if: github.event_name != 'schedule' || github.repository == 'redis/redis'\n timeout-minutes: 14400\n steps:\n - uses: actions/checkout@v4\n - name: Build\n run: make REDIS_CFLAGS=-Werror\n - name: Start redis-server\n run: |\n ./src/redis-server --cluster-enabled yes --daemonize yes --save "" --logfile external-redis-cluster.log \\n --enable-protected-configs yes --enable-debug-command yes --enable-module-command yes\n - name: Create a single node cluster\n run: ./src/redis-cli cluster addslots $(for slot in {0..16383}; do echo $slot; done); sleep 5\n - name: Run external test\n run: |\n ./runtest \\n --host 127.0.0.1 --port 6379 \\n --verbose \\n --cluster-mode \\n --tags -slow\n - name: Archive redis log\n if: ${{ failure() }}\n uses: actions/upload-artifact@v4\n with:\n name: test-external-cluster-log\n path: external-redis-cluster.log\n\n test-external-nodebug:\n runs-on: ubuntu-latest\n if: github.event_name != 'schedule' || github.repository == 'redis/redis'\n timeout-minutes: 14400\n steps:\n - uses: actions/checkout@v4\n - name: Build\n run: make REDIS_CFLAGS=-Werror\n - name: Start redis-server\n run: |\n 
./src/redis-server --daemonize yes --save "" --logfile external-redis-nodebug.log\n - name: Run external test\n run: |\n ./runtest \\n --host 127.0.0.1 --port 6379 \\n --verbose \\n --tags "-slow -needs:debug"\n - name: Archive redis log\n if: ${{ failure() }}\n uses: actions/upload-artifact@v4\n with:\n name: test-external-redis-nodebug-log\n path: external-redis-nodebug.log\n | dataset_sample\yaml\antirez_redis\.github\workflows\external.yml | external.yml | YAML | 2,737 | 0.7 | 0.082353 | 0 | react-lib | 527 | 2024-05-25T06:09:20.484041 | Apache-2.0 | false | 020317daca0fb9e8a4b323d927de4419 |
name: Reply-schemas linter\n\non:\n push:\n paths:\n - 'src/commands/*.json'\n pull_request:\n paths:\n - 'src/commands/*.json'\n\njobs:\n reply-schemas-linter:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - name: Setup nodejs\n uses: actions/setup-node@v4\n - name: Install packages\n run: npm install ajv\n - name: linter\n run: node ./utils/reply_schema_linter.js\n\n | dataset_sample\yaml\antirez_redis\.github\workflows\reply-schemas-linter.yml | reply-schemas-linter.yml | YAML | 435 | 0.8 | 0 | 0 | python-kit | 815 | 2023-08-08T21:51:07.554603 | BSD-3-Clause | false | 61daca375315b34142c42809f417a66c |
# A CI action that using codespell to check spell.\n# .github/.codespellrc is a config file.\n# .github/wordlist.txt is a list of words that will ignore word checks.\n# More details please check the following link:\n# https://github.com/codespell-project/codespell\nname: Spellcheck\n\non:\n push:\n pull_request:\n\njobs:\n build:\n name: Spellcheck\n runs-on: ubuntu-latest\n\n steps:\n - name: Checkout repository\n uses: actions/checkout@v4\n\n - name: pip cache\n uses: actions/cache@v4\n with:\n path: ~/.cache/pip\n key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}\n restore-keys: ${{ runner.os }}-pip-\n\n - name: Install prerequisites\n run: sudo pip install -r ./.codespell/requirements.txt\n\n - name: Spell check\n run: codespell --config=./.codespell/.codespellrc\n | dataset_sample\yaml\antirez_redis\.github\workflows\spell-check.yml | spell-check.yml | YAML | 857 | 0.95 | 0 | 0.192308 | awesome-app | 586 | 2024-08-24T10:19:19.493547 | BSD-3-Clause | false | 87a65031e6f988998fc1788b63a2a44e |
language: c\ncompiler:\n - gcc\n - clang\n\nos:\n - linux\n - osx\n\ndist: bionic\n\nbranches:\n only:\n - staging\n - trying\n - master\n - /^release\/.*$/\n\ninstall:\n - if [ "$TRAVIS_COMPILER" != "mingw" ]; then\n wget https://github.com/redis/redis/archive/6.0.6.tar.gz;\n tar -xzvf 6.0.6.tar.gz;\n pushd redis-6.0.6 && BUILD_TLS=yes make && export PATH=$PWD/src:$PATH && popd;\n fi;\n\nbefore_script:\n - if [ "$TRAVIS_OS_NAME" == "osx" ]; then\n curl -O https://distfiles.macports.org/MacPorts/MacPorts-2.6.2-10.13-HighSierra.pkg;\n sudo installer -pkg MacPorts-2.6.2-10.13-HighSierra.pkg -target /;\n export PATH=$PATH:/opt/local/bin && sudo port -v selfupdate;\n sudo port -N install openssl redis;\n fi;\n\naddons:\n apt:\n packages:\n - libc6-dbg\n - libc6-dev\n - libc6:i386\n - libc6-dev-i386\n - libc6-dbg:i386\n - gcc-multilib\n - g++-multilib\n - libssl-dev\n - libssl-dev:i386\n - valgrind\n\nenv:\n - BITS="32"\n - BITS="64"\n\nscript:\n - EXTRA_CMAKE_OPTS="-DENABLE_EXAMPLES:BOOL=ON -DENABLE_SSL:BOOL=ON -DENABLE_SSL_TESTS:BOOL=ON";\n if [ "$TRAVIS_OS_NAME" == "osx" ]; then\n if [ "$BITS" == "32" ]; then\n CFLAGS="-m32 -Werror";\n CXXFLAGS="-m32 -Werror";\n LDFLAGS="-m32";\n EXTRA_CMAKE_OPTS=;\n else\n CFLAGS="-Werror";\n CXXFLAGS="-Werror";\n fi;\n else\n TEST_PREFIX="valgrind --track-origins=yes --leak-check=full";\n if [ "$BITS" == "32" ]; then\n CFLAGS="-m32 -Werror";\n CXXFLAGS="-m32 -Werror";\n LDFLAGS="-m32";\n EXTRA_CMAKE_OPTS=;\n else\n CFLAGS="-Werror";\n CXXFLAGS="-Werror";\n fi;\n fi;\n export CFLAGS CXXFLAGS LDFLAGS TEST_PREFIX EXTRA_CMAKE_OPTS\n - make && make clean;\n if [ "$TRAVIS_OS_NAME" == "osx" ]; then\n if [ "$BITS" == "64" ]; then\n OPENSSL_PREFIX="$(ls -d /usr/local/Cellar/openssl@1.1/*)" USE_SSL=1 make;\n fi;\n else\n USE_SSL=1 make;\n fi;\n - mkdir build/ && cd build/\n - cmake .. 
${EXTRA_CMAKE_OPTS}\n - make VERBOSE=1\n - if [ "$BITS" == "64" ]; then\n TEST_SSL=1 SKIPS_AS_FAILS=1 ctest -V;\n else\n SKIPS_AS_FAILS=1 ctest -V;\n fi;\n\njobs:\n include:\n # Windows MinGW cross compile on Linux\n - os: linux\n dist: xenial\n compiler: mingw\n addons:\n apt:\n packages:\n - ninja-build\n - gcc-mingw-w64-x86-64\n - g++-mingw-w64-x86-64\n script:\n - mkdir build && cd build\n - CC=x86_64-w64-mingw32-gcc CXX=x86_64-w64-mingw32-g++ cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release -DCMAKE_BUILD_WITH_INSTALL_RPATH=on\n - ninja -v\n\n # Windows MSVC 2017\n - os: windows\n compiler: msvc\n env:\n - MATRIX_EVAL="CC=cl.exe && CXX=cl.exe"\n before_install:\n - eval "${MATRIX_EVAL}"\n install:\n - choco install ninja\n - choco install -y memurai-developer\n script:\n - mkdir build && cd build\n - cmd.exe //C 'C:\Program Files (x86)\Microsoft Visual Studio\2017\BuildTools\VC\Auxiliary\Build\vcvarsall.bat' amd64 '&&'\n cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_EXAMPLES=ON '&&' ninja -v\n - ./hiredis-test.exe\n | dataset_sample\yaml\antirez_redis\deps\hiredis\.travis.yml | .travis.yml | YAML | 3,242 | 0.8 | 0.064 | 0.017391 | awesome-app | 534 | 2025-01-21T20:28:40.633377 | GPL-3.0 | false | 6fd309bd0f8c44fc48c850c9abd3763a |
# Appveyor configuration file for CI build of hiredis on Windows (under Cygwin)\nenvironment:\n matrix:\n - CYG_BASH: C:\cygwin64\bin\bash\n CC: gcc\n - CYG_BASH: C:\cygwin\bin\bash\n CC: gcc\n CFLAGS: -m32\n CXXFLAGS: -m32\n LDFLAGS: -m32\n\nclone_depth: 1\n\n# Attempt to ensure we don't try to convert line endings to Win32 CRLF as this will cause build to fail\ninit:\n - git config --global core.autocrlf input\n\n# Install needed build dependencies\ninstall:\n - '%CYG_BASH% -lc "cygcheck -dc cygwin"'\n\nbuild_script:\n - 'echo building...'\n - '%CYG_BASH% -lc "cd $APPVEYOR_BUILD_FOLDER; exec 0</dev/null; mkdir build && cd build && cmake .. -G \"Unix Makefiles\" && make VERBOSE=1"'\n | dataset_sample\yaml\antirez_redis\deps\hiredis\appveyor.yml | appveyor.yml | YAML | 743 | 0.8 | 0.083333 | 0.15 | python-kit | 131 | 2024-08-15T17:11:43.721984 | GPL-3.0 | false | b59a03092677a1a9eb23e9a2a512c339 |
name: Build and test\non: [push, pull_request]\n\njobs:\n ubuntu:\n name: Ubuntu\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v3\n\n - name: Install dependencies\n run: |\n curl -fsSL https://packages.redis.io/gpg | sudo gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg\n echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/redis.list\n sudo apt-get update\n sudo apt-get install -y redis-server valgrind libevent-dev\n\n - name: Build using cmake\n env:\n EXTRA_CMAKE_OPTS: -DENABLE_EXAMPLES:BOOL=ON -DENABLE_SSL:BOOL=ON -DENABLE_SSL_TESTS:BOOL=ON -DENABLE_ASYNC_TESTS:BOOL=ON\n CFLAGS: -Werror\n CXXFLAGS: -Werror\n run: mkdir build && cd build && cmake .. && make\n\n - name: Build using makefile\n run: USE_SSL=1 TEST_ASYNC=1 make\n\n - name: Run tests\n env:\n SKIPS_AS_FAILS: 1\n TEST_SSL: 1\n run: $GITHUB_WORKSPACE/test.sh\n\n # - name: Run tests under valgrind\n # env:\n # SKIPS_AS_FAILS: 1\n # TEST_PREFIX: valgrind --error-exitcode=99 --track-origins=yes --leak-check=full\n # run: $GITHUB_WORKSPACE/test.sh\n\n centos7:\n name: CentOS 7\n runs-on: ubuntu-latest\n container: centos:7\n steps:\n - uses: actions/checkout@v3\n\n - name: Install dependencies\n run: |\n yum -y install http://rpms.remirepo.net/enterprise/remi-release-7.rpm\n yum -y --enablerepo=remi install redis\n yum -y install gcc gcc-c++ make openssl openssl-devel cmake3 valgrind libevent-devel\n\n - name: Build using cmake\n env:\n EXTRA_CMAKE_OPTS: -DENABLE_EXAMPLES:BOOL=ON -DENABLE_SSL:BOOL=ON -DENABLE_SSL_TESTS:BOOL=ON -DENABLE_ASYNC_TESTS:BOOL=ON\n CFLAGS: -Werror\n CXXFLAGS: -Werror\n run: mkdir build && cd build && cmake3 .. 
&& make\n\n - name: Build using Makefile\n run: USE_SSL=1 TEST_ASYNC=1 make\n\n - name: Run tests\n env:\n SKIPS_AS_FAILS: 1\n TEST_SSL: 1\n run: $GITHUB_WORKSPACE/test.sh\n\n - name: Run tests under valgrind\n env:\n SKIPS_AS_FAILS: 1\n TEST_SSL: 1\n TEST_PREFIX: valgrind --error-exitcode=99 --track-origins=yes --leak-check=full\n run: $GITHUB_WORKSPACE/test.sh\n\n centos8:\n name: RockyLinux 8\n runs-on: ubuntu-latest\n container: rockylinux:8\n steps:\n - uses: actions/checkout@v3\n\n - name: Install dependencies\n run: |\n dnf -y upgrade --refresh\n dnf -y install https://rpms.remirepo.net/enterprise/remi-release-8.rpm\n dnf -y module install redis:remi-6.0\n dnf -y group install "Development Tools"\n dnf -y install openssl-devel cmake valgrind libevent-devel\n\n - name: Build using cmake\n env:\n EXTRA_CMAKE_OPTS: -DENABLE_EXAMPLES:BOOL=ON -DENABLE_SSL:BOOL=ON -DENABLE_SSL_TESTS:BOOL=ON -DENABLE_ASYNC_TESTS:BOOL=ON\n CFLAGS: -Werror\n CXXFLAGS: -Werror\n run: mkdir build && cd build && cmake .. && make\n\n - name: Build using Makefile\n run: USE_SSL=1 TEST_ASYNC=1 make\n\n - name: Run tests\n env:\n SKIPS_AS_FAILS: 1\n TEST_SSL: 1\n run: $GITHUB_WORKSPACE/test.sh\n\n - name: Run tests under valgrind\n env:\n SKIPS_AS_FAILS: 1\n TEST_SSL: 1\n TEST_PREFIX: valgrind --error-exitcode=99 --track-origins=yes --leak-check=full\n run: $GITHUB_WORKSPACE/test.sh\n\n freebsd:\n runs-on: macos-12\n name: FreeBSD\n steps:\n - uses: actions/checkout@v3\n\n - name: Build in FreeBSD\n uses: vmactions/freebsd-vm@v0\n with:\n prepare: pkg install -y gmake cmake\n run: |\n mkdir build && cd build && cmake .. 
&& make && cd ..\n gmake\n\n macos:\n name: macOS\n runs-on: macos-latest\n steps:\n - uses: actions/checkout@v3\n\n - name: Install dependencies\n run: |\n brew install openssl redis@7.0\n brew link redis@7.0 --force\n\n - name: Build hiredis\n run: USE_SSL=1 make\n\n - name: Run tests\n env:\n TEST_SSL: 1\n run: $GITHUB_WORKSPACE/test.sh\n\n windows:\n name: Windows\n runs-on: windows-latest\n steps:\n - uses: actions/checkout@v3\n\n - name: Install dependencies\n run: |\n choco install -y ninja memurai-developer\n\n - uses: ilammy/msvc-dev-cmd@v1\n - name: Build hiredis\n run: |\n mkdir build && cd build\n cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_EXAMPLES=ON\n ninja -v\n\n - name: Run tests\n run: |\n ./build/hiredis-test.exe\n\n - name: Install Cygwin Action\n uses: cygwin/cygwin-install-action@v2\n with:\n packages: make git gcc-core\n\n - name: Build in cygwin\n env:\n HIREDIS_PATH: ${{ github.workspace }}\n run: |\n make clean && make\n | dataset_sample\yaml\antirez_redis\deps\hiredis\.github\workflows\build.yml | build.yml | YAML | 5,131 | 0.8 | 0 | 0.034014 | awesome-app | 834 | 2025-02-18T17:04:43.215694 | Apache-2.0 | false | df7a1415bc54cb20d7a7fcce6d68c69b |
version: '{build}'\n\nenvironment:\n matrix:\n - MSYSTEM: MINGW64\n CPU: x86_64\n MSVC: amd64\n CONFIG_FLAGS: --enable-debug\n - MSYSTEM: MINGW64\n CPU: x86_64\n CONFIG_FLAGS: --enable-debug\n - MSYSTEM: MINGW32\n CPU: i686\n MSVC: x86\n CONFIG_FLAGS: --enable-debug\n - MSYSTEM: MINGW32\n CPU: i686\n CONFIG_FLAGS: --enable-debug\n - MSYSTEM: MINGW64\n CPU: x86_64\n MSVC: amd64\n - MSYSTEM: MINGW64\n CPU: x86_64\n - MSYSTEM: MINGW32\n CPU: i686\n MSVC: x86\n - MSYSTEM: MINGW32\n CPU: i686\n\ninstall:\n - set PATH=c:\msys64\%MSYSTEM%\bin;c:\msys64\usr\bin;%PATH%\n - if defined MSVC call "c:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" %MSVC%\n - if defined MSVC pacman --noconfirm -Rsc mingw-w64-%CPU%-gcc gcc\n\nbuild_script:\n - bash -c "autoconf"\n - bash -c "./configure $CONFIG_FLAGS"\n - mingw32-make\n - file lib/jemalloc.dll\n - mingw32-make tests\n - mingw32-make -k check\n | dataset_sample\yaml\antirez_redis\deps\jemalloc\.appveyor.yml | .appveyor.yml | YAML | 936 | 0.7 | 0.04878 | 0 | vue-tools | 269 | 2023-11-01T13:12:54.817928 | BSD-3-Clause | false | 0a826b2304a14ba00e7f89b725054312 |
env:\n CIRRUS_CLONE_DEPTH: 1\n ARCH: amd64\n\ntask:\n matrix:\n env:\n DEBUG_CONFIG: --enable-debug\n env:\n DEBUG_CONFIG: --disable-debug\n matrix:\n - env:\n PROF_CONFIG: --enable-prof\n - env:\n PROF_CONFIG: --disable-prof\n matrix:\n - name: 64-bit\n env:\n CC:\n CXX:\n - name: 32-bit\n env:\n CC: cc -m32\n CXX: c++ -m32\n matrix:\n - env:\n UNCOMMON_CONFIG:\n - env:\n UNCOMMON_CONFIG: --with-lg-page=16 --with-malloc-conf=tcache:false\n freebsd_instance:\n matrix:\n image: freebsd-12-3-release-amd64\n install_script:\n - sed -i.bak -e 's,pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly,pkg+http://pkg.FreeBSD.org/\${ABI}/latest,' /etc/pkg/FreeBSD.conf\n - pkg upgrade -y\n - pkg install -y autoconf gmake\n script:\n - autoconf\n # We don't perfectly track freebsd stdlib.h definitions. This is fine when\n # we count as a system header, but breaks otherwise, like during these\n # tests.\n - ./configure --with-jemalloc-prefix=ci_ ${DEBUG_CONFIG} ${PROF_CONFIG} ${UNCOMMON_CONFIG}\n - export JFLAG=`sysctl -n kern.smp.cpus`\n - gmake -j${JFLAG}\n - gmake -j${JFLAG} tests\n - gmake check\n | dataset_sample\yaml\antirez_redis\deps\jemalloc\.cirrus.yml | .cirrus.yml | YAML | 1,213 | 0.8 | 0 | 0.066667 | python-kit | 516 | 2023-12-30T03:23:55.957887 | Apache-2.0 | false | ac7615a80ea4c3ad7c08f90aa1b8295b |
# This config file is generated by ./scripts/gen_travis.py.\n# Do not edit by hand.\n\n# We use 'minimal', because 'generic' makes Windows VMs hang at startup. Also\n# the software provided by 'generic' is simply not needed for our tests.\n# Differences are explained here:\n# https://docs.travis-ci.com/user/languages/minimal-and-generic/\nlanguage: minimal\ndist: focal\n\njobs:\n include:\n - os: windows\n arch: amd64\n env: CC=gcc CXX=g++ EXTRA_CFLAGS="-fcommon"\n - os: windows\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-debug" EXTRA_CFLAGS="-fcommon"\n - os: windows\n arch: amd64\n env: CC=cl.exe CXX=cl.exe\n - os: windows\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes EXTRA_CFLAGS="-fcommon"\n - os: windows\n arch: amd64\n env: CC=cl.exe CXX=cl.exe CONFIGURE_FLAGS="--enable-debug"\n - os: windows\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes CONFIGURE_FLAGS="--enable-debug" EXTRA_CFLAGS="-fcommon"\n - os: windows\n arch: amd64\n env: CC=cl.exe CXX=cl.exe CROSS_COMPILE_32BIT=yes\n - os: windows\n arch: amd64\n env: CC=cl.exe CXX=cl.exe CROSS_COMPILE_32BIT=yes CONFIGURE_FLAGS="--enable-debug"\n - os: freebsd\n arch: amd64\n env: CC=gcc CXX=g++\n - os: freebsd\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-debug"\n - os: freebsd\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-prof --enable-prof-libunwind"\n - os: freebsd\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-lg-page=16 --with-malloc-conf=tcache:false"\n - os: freebsd\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes\n - os: freebsd\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-debug --enable-prof --enable-prof-libunwind"\n - os: freebsd\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-debug --with-lg-page=16 --with-malloc-conf=tcache:false"\n - os: freebsd\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes CONFIGURE_FLAGS="--enable-debug"\n - os: freebsd\n arch: amd64\n 
env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-prof --enable-prof-libunwind --with-lg-page=16 --with-malloc-conf=tcache:false"\n - os: freebsd\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes CONFIGURE_FLAGS="--enable-prof --enable-prof-libunwind"\n - os: freebsd\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes CONFIGURE_FLAGS="--with-lg-page=16 --with-malloc-conf=tcache:false"\n - os: freebsd\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-debug --enable-prof --enable-prof-libunwind --with-lg-page=16 --with-malloc-conf=tcache:false"\n - os: freebsd\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes CONFIGURE_FLAGS="--enable-debug --enable-prof --enable-prof-libunwind"\n - os: freebsd\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes CONFIGURE_FLAGS="--enable-debug --with-lg-page=16 --with-malloc-conf=tcache:false"\n - os: freebsd\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes CONFIGURE_FLAGS="--enable-prof --enable-prof-libunwind --with-lg-page=16 --with-malloc-conf=tcache:false"\n - os: freebsd\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes CONFIGURE_FLAGS="--enable-debug --enable-prof --enable-prof-libunwind --with-lg-page=16 --with-malloc-conf=tcache:false"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=clang CXX=clang++ EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes COMPILER_FLAGS="-m32" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-debug" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-prof" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--disable-stats" EXTRA_CFLAGS="-Werror 
-Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--disable-libdl" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-opt-safety-checks" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-lg-page=16" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-malloc-conf=tcache:false" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-malloc-conf=dss:primary" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-malloc-conf=percpu_arena:percpu" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-malloc-conf=background_thread:true" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=clang CXX=clang++ CROSS_COMPILE_32BIT=yes COMPILER_FLAGS="-m32" EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes"\n - os: linux\n arch: amd64\n env: CC=clang CXX=clang++ CONFIGURE_FLAGS="--enable-debug" EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes"\n - os: linux\n arch: amd64\n env: CC=clang CXX=clang++ CONFIGURE_FLAGS="--enable-prof" EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes"\n - os: linux\n arch: amd64\n env: CC=clang CXX=clang++ CONFIGURE_FLAGS="--disable-stats" EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes"\n - os: linux\n arch: amd64\n env: CC=clang CXX=clang++ CONFIGURE_FLAGS="--disable-libdl" EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes"\n - os: linux\n arch: amd64\n env: CC=clang CXX=clang++ 
CONFIGURE_FLAGS="--enable-opt-safety-checks" EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes"\n - os: linux\n arch: amd64\n env: CC=clang CXX=clang++ CONFIGURE_FLAGS="--with-lg-page=16" EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes"\n - os: linux\n arch: amd64\n env: CC=clang CXX=clang++ CONFIGURE_FLAGS="--with-malloc-conf=tcache:false" EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes"\n - os: linux\n arch: amd64\n env: CC=clang CXX=clang++ CONFIGURE_FLAGS="--with-malloc-conf=dss:primary" EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes"\n - os: linux\n arch: amd64\n env: CC=clang CXX=clang++ CONFIGURE_FLAGS="--with-malloc-conf=percpu_arena:percpu" EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes"\n - os: linux\n arch: amd64\n env: CC=clang CXX=clang++ CONFIGURE_FLAGS="--with-malloc-conf=background_thread:true" EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes COMPILER_FLAGS="-m32" CONFIGURE_FLAGS="--enable-debug" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes COMPILER_FLAGS="-m32" CONFIGURE_FLAGS="--enable-prof" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes COMPILER_FLAGS="-m32" CONFIGURE_FLAGS="--disable-stats" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes COMPILER_FLAGS="-m32" CONFIGURE_FLAGS="--disable-libdl" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes COMPILER_FLAGS="-m32" CONFIGURE_FLAGS="--enable-opt-safety-checks" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n 
- os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes COMPILER_FLAGS="-m32" CONFIGURE_FLAGS="--with-lg-page=16" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes COMPILER_FLAGS="-m32" CONFIGURE_FLAGS="--with-malloc-conf=tcache:false" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes COMPILER_FLAGS="-m32" CONFIGURE_FLAGS="--with-malloc-conf=dss:primary" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes COMPILER_FLAGS="-m32" CONFIGURE_FLAGS="--with-malloc-conf=percpu_arena:percpu" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes COMPILER_FLAGS="-m32" CONFIGURE_FLAGS="--with-malloc-conf=background_thread:true" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-debug --enable-prof" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-debug --disable-stats" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-debug --disable-libdl" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-debug --enable-opt-safety-checks" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-debug --with-lg-page=16" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-debug --with-malloc-conf=tcache:false" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-debug --with-malloc-conf=dss:primary" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc 
CXX=g++ CONFIGURE_FLAGS="--enable-debug --with-malloc-conf=percpu_arena:percpu" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-debug --with-malloc-conf=background_thread:true" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-prof --disable-stats" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-prof --disable-libdl" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-prof --enable-opt-safety-checks" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-prof --with-lg-page=16" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-prof --with-malloc-conf=tcache:false" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-prof --with-malloc-conf=dss:primary" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-prof --with-malloc-conf=percpu_arena:percpu" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-prof --with-malloc-conf=background_thread:true" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--disable-stats --disable-libdl" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--disable-stats --enable-opt-safety-checks" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--disable-stats --with-lg-page=16" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ 
CONFIGURE_FLAGS="--disable-stats --with-malloc-conf=tcache:false" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--disable-stats --with-malloc-conf=dss:primary" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--disable-stats --with-malloc-conf=percpu_arena:percpu" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--disable-stats --with-malloc-conf=background_thread:true" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--disable-libdl --enable-opt-safety-checks" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--disable-libdl --with-lg-page=16" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--disable-libdl --with-malloc-conf=tcache:false" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--disable-libdl --with-malloc-conf=dss:primary" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--disable-libdl --with-malloc-conf=percpu_arena:percpu" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--disable-libdl --with-malloc-conf=background_thread:true" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-opt-safety-checks --with-lg-page=16" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-opt-safety-checks --with-malloc-conf=tcache:false" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-opt-safety-checks --with-malloc-conf=dss:primary" 
EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-opt-safety-checks --with-malloc-conf=percpu_arena:percpu" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-opt-safety-checks --with-malloc-conf=background_thread:true" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-lg-page=16 --with-malloc-conf=tcache:false" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-lg-page=16 --with-malloc-conf=dss:primary" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-lg-page=16 --with-malloc-conf=percpu_arena:percpu" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-lg-page=16 --with-malloc-conf=background_thread:true" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-malloc-conf=tcache:false,dss:primary" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-malloc-conf=tcache:false,percpu_arena:percpu" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-malloc-conf=tcache:false,background_thread:true" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-malloc-conf=dss:primary,percpu_arena:percpu" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-malloc-conf=dss:primary,background_thread:true" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-malloc-conf=percpu_arena:percpu,background_thread:true" EXTRA_CFLAGS="-Werror 
-Wno-array-bounds"\n - os: linux\n arch: ppc64le\n env: CC=gcc CXX=g++ EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: ppc64le\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-debug" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: ppc64le\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-prof" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: ppc64le\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--disable-stats" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: ppc64le\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--disable-libdl" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: ppc64le\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-opt-safety-checks" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: ppc64le\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-lg-page=16" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: ppc64le\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-malloc-conf=tcache:false" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: ppc64le\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-malloc-conf=dss:primary" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: ppc64le\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-malloc-conf=percpu_arena:percpu" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: linux\n arch: ppc64le\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-malloc-conf=background_thread:true" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n - os: osx\n arch: amd64\n env: CC=gcc CXX=g++ EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes -Wno-deprecated-declarations"\n - os: osx\n arch: amd64\n env: CC=gcc CXX=g++ CROSS_COMPILE_32BIT=yes EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes -Wno-deprecated-declarations"\n - os: osx\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-debug" EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option 
-Wno-ignored-attributes -Wno-deprecated-declarations"\n - os: osx\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--disable-stats" EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes -Wno-deprecated-declarations"\n - os: osx\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--disable-libdl" EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes -Wno-deprecated-declarations"\n - os: osx\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-opt-safety-checks" EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes -Wno-deprecated-declarations"\n - os: osx\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-lg-page=16" EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes -Wno-deprecated-declarations"\n - os: osx\n arch: amd64\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--with-malloc-conf=tcache:false" EXTRA_CFLAGS="-Werror -Wno-array-bounds -Wno-unknown-warning-option -Wno-ignored-attributes -Wno-deprecated-declarations"\n # Development build\n - os: linux\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-debug --disable-cache-oblivious --enable-stats --enable-log --enable-prof" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n # --enable-expermental-smallocx:\n - os: linux\n env: CC=gcc CXX=g++ CONFIGURE_FLAGS="--enable-debug --enable-experimental-smallocx --enable-stats --enable-prof" EXTRA_CFLAGS="-Werror -Wno-array-bounds"\n\n\nbefore_install:\n - |-\n if test -f "./scripts/$TRAVIS_OS_NAME/before_install.sh"; then\n source ./scripts/$TRAVIS_OS_NAME/before_install.sh\n fi\n\nbefore_script:\n - |-\n if test -f "./scripts/$TRAVIS_OS_NAME/before_script.sh"; then\n source ./scripts/$TRAVIS_OS_NAME/before_script.sh\n else\n scripts/gen_travis.py > travis_script && diff .travis.yml travis_script\n autoconf\n # If COMPILER_FLAGS are not empty, add them to CC and CXX\n ./configure ${COMPILER_FLAGS:+ CC="$CC 
$COMPILER_FLAGS" CXX="$CXX $COMPILER_FLAGS"} $CONFIGURE_FLAGS\n make -j3\n make -j3 tests\n fi\n\nscript:\n - |-\n if test -f "./scripts/$TRAVIS_OS_NAME/script.sh"; then\n source ./scripts/$TRAVIS_OS_NAME/script.sh\n else\n make check\n fi\n\n | dataset_sample\yaml\antirez_redis\deps\jemalloc\.travis.yml | .travis.yml | YAML | 21,404 | 0.8 | 0.009685 | 0.022167 | awesome-app | 384 | 2024-12-17T17:00:38.424126 | GPL-3.0 | false | 296b4a094c005455ffb1fccdaf2de253 |
# Number of labels to fetch (optional). Defaults to 20\nnumLabels: 40\n# These labels will not be used even if the issue contains them (optional). \n# Pass a blank array if no labels are to be excluded.\n# excludeLabels: []\nexcludeLabels:\n - "wont fix"\n - "help wanted"\n - "duplicated"\n - "more info needed"\n - "works for me"\n | dataset_sample\yaml\aonez_Keka\.github\labeler.yml | labeler.yml | YAML | 327 | 0.8 | 0.272727 | 0.363636 | awesome-app | 931 | 2025-05-16T09:17:54.715235 | Apache-2.0 | false | 3309a85ba5332efa12cd49ad90188d9e |
# Configuration for move-issues - https://github.com/dessant/move-issues\n\n# Delete the command comment when it contains no other content\ndeleteCommand: true\n\n# Close the source issue after moving\ncloseSourceIssue: true\n\n# Lock the source issue after moving\nlockSourceIssue: true\n\n# Mention issue and comment authors\nmentionAuthors: true\n\n# Preserve mentions in the issue content\nkeepContentMentions: false\n\n# Set custom aliases for targets\n# aliases:\n# r: repo\n# or: owner/repo\n\n# Repository to extend settings from\n# _extends: repo\n | dataset_sample\yaml\aonez_Keka\.github\move.yml | move.yml | YAML | 537 | 0.8 | 0.083333 | 0.705882 | python-kit | 586 | 2025-06-16T06:45:58.959363 | GPL-3.0 | false | 7a3752086a1dde443b9c43b4cf1752a1 |
# Configuration for probot-no-response - https://github.com/probot/no-response\n\n# Number of days of inactivity before an Issue is closed for lack of response\ndaysUntilClose: 7\n# Label requiring a response\nresponseRequiredLabel: "more info needed"\n# Comment to post when closing an Issue for lack of response. Set to `false` to disable\ncloseComment: >\n This issue has been automatically closed because there has been no response\n to our request for more information from the original author. With only the\n information that is currently in the issue, we don't have enough information\n to take action. Please reach out if you have or find the answers we need so\n that we can investigate further.\n | dataset_sample\yaml\aonez_Keka\.github\no-response.yml | no-response.yml | YAML | 699 | 0.8 | 0.384615 | 0.333333 | node-utils | 9 | 2025-01-01T18:16:50.297411 | GPL-3.0 | false | 7294a8d2fd419d69d2e4020f24744452 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.