Compare commits
7 Commits
799763e7e7
...
feature/ao
| Author | SHA1 | Date | |
|---|---|---|---|
| b31ba5d278 | |||
| 29cbab545f | |||
| 60ef6a5df6 | |||
| 1688178cd3 | |||
| ab5f1289c4 | |||
| 2c4627d467 | |||
| 34017ee771 |
39
.drone.yml
39
.drone.yml
@@ -24,7 +24,7 @@ steps:
|
||||
- staging
|
||||
|
||||
- name: build-validator
|
||||
image: rust:1.92
|
||||
image: clux/muslrust:1.91.0-stable
|
||||
commands:
|
||||
- make build-validator
|
||||
when:
|
||||
@@ -32,15 +32,6 @@ steps:
|
||||
- master
|
||||
- staging
|
||||
|
||||
- name: build-combobulator
|
||||
image: rust:1.92
|
||||
commands:
|
||||
- make build-combobulator
|
||||
when:
|
||||
branch:
|
||||
- master
|
||||
- staging
|
||||
|
||||
- name: build-frontend
|
||||
image: oven/bun:1.3.3
|
||||
commands:
|
||||
@@ -121,29 +112,6 @@ steps:
|
||||
event:
|
||||
- push
|
||||
|
||||
- name: image-combobulator
|
||||
image: plugins/docker
|
||||
settings:
|
||||
registry: registry.itzana.me
|
||||
repo: registry.itzana.me/strafesnet/maptest-combobulator
|
||||
tags:
|
||||
- ${DRONE_BRANCH}-${DRONE_BUILD_NUMBER}
|
||||
- ${DRONE_BRANCH}
|
||||
username:
|
||||
from_secret: REGISTRY_USER
|
||||
password:
|
||||
from_secret: REGISTRY_PASS
|
||||
dockerfile: combobulator/Containerfile
|
||||
context: .
|
||||
depends_on:
|
||||
- build-combobulator
|
||||
when:
|
||||
branch:
|
||||
- master
|
||||
- staging
|
||||
event:
|
||||
- push
|
||||
|
||||
- name: deploy
|
||||
image: argoproj/argocd:latest
|
||||
commands:
|
||||
@@ -151,7 +119,6 @@ steps:
|
||||
- argocd app --grpc-web set ${DRONE_BRANCH}-maps-service --kustomize-image registry.itzana.me/strafesnet/maptest-api:${DRONE_BRANCH}-${DRONE_BUILD_NUMBER}
|
||||
- argocd app --grpc-web set ${DRONE_BRANCH}-maps-service --kustomize-image registry.itzana.me/strafesnet/maptest-frontend:${DRONE_BRANCH}-${DRONE_BUILD_NUMBER}
|
||||
- argocd app --grpc-web set ${DRONE_BRANCH}-maps-service --kustomize-image registry.itzana.me/strafesnet/maptest-validator:${DRONE_BRANCH}-${DRONE_BUILD_NUMBER}
|
||||
- argocd app --grpc-web set ${DRONE_BRANCH}-maps-service --kustomize-image registry.itzana.me/strafesnet/maptest-combobulator:${DRONE_BRANCH}-${DRONE_BUILD_NUMBER}
|
||||
environment:
|
||||
USERNAME:
|
||||
from_secret: ARGO_USER
|
||||
@@ -161,7 +128,6 @@ steps:
|
||||
- image-backend
|
||||
- image-frontend
|
||||
- image-validator
|
||||
- image-combobulator
|
||||
when:
|
||||
branch:
|
||||
- master
|
||||
@@ -177,13 +143,12 @@ steps:
|
||||
depends_on:
|
||||
- build-backend
|
||||
- build-validator
|
||||
- build-combobulator
|
||||
- build-frontend
|
||||
when:
|
||||
event:
|
||||
- pull_request
|
||||
---
|
||||
kind: signature
|
||||
hmac: a654fea05ccf642bb3a41ce777808ff995c8bd7286f2403fae179ce0db025619
|
||||
hmac: 6de9d4b91f14b30561856daf275d1fd523e1ce7a5a3651b660f0d8907b4692fb
|
||||
|
||||
...
|
||||
|
||||
2952
Cargo.lock
generated
2952
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
10
Cargo.toml
10
Cargo.toml
@@ -1,16 +1,6 @@
|
||||
[workspace]
|
||||
members = [
|
||||
"combobulator",
|
||||
"validation",
|
||||
"submissions-api-rs",
|
||||
]
|
||||
resolver = "2"
|
||||
|
||||
[workspace.dependencies]
|
||||
async-nats = "0.46.0"
|
||||
rbx_asset = { version = "0.5.0", features = ["gzip", "rustls-tls"], default-features = false, registry = "strafesnet" }
|
||||
rbx_binary = "2.0.1"
|
||||
rbx_dom_weak = "4.1.0"
|
||||
serde = { version = "1.0.215", features = ["derive"] }
|
||||
serde_json = "1.0.133"
|
||||
tokio = { version = "1.41.1", features = ["macros", "rt-multi-thread", "signal"] }
|
||||
|
||||
17
Makefile
17
Makefile
@@ -7,17 +7,14 @@ build-backend:
|
||||
CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o build/server cmd/maps-service/service.go
|
||||
|
||||
build-validator:
|
||||
cargo build --release --bin maps-validation
|
||||
|
||||
build-combobulator:
|
||||
cargo build --release --bin maps-combobulator
|
||||
cargo build --release --target x86_64-unknown-linux-musl --bin maps-validation
|
||||
|
||||
build-frontend:
|
||||
rm -rf web/build
|
||||
cd web && bun install --frozen-lockfile
|
||||
cd web && bun run build
|
||||
|
||||
build: build-backend build-validator build-combobulator build-frontend
|
||||
build: build-backend build-validator build-frontend
|
||||
|
||||
# image
|
||||
image-backend:
|
||||
@@ -26,9 +23,6 @@ image-backend:
|
||||
image-validator:
|
||||
docker build . -f validation/Containerfile -t maptest-validator
|
||||
|
||||
image-combobulator:
|
||||
docker build . -f combobulator/Containerfile -t maptest-combobulator
|
||||
|
||||
image-frontend:
|
||||
docker build web -f web/Containerfile -t maptest-frontend
|
||||
|
||||
@@ -39,12 +33,9 @@ docker-backend:
|
||||
docker-validator:
|
||||
make build-validator
|
||||
make image-validator
|
||||
docker-combobulator:
|
||||
make build-combobulator
|
||||
make image-combobulator
|
||||
docker-frontend:
|
||||
make image-frontend
|
||||
|
||||
docker: docker-backend docker-validator docker-combobulator docker-frontend
|
||||
docker: docker-backend docker-validator docker-frontend
|
||||
|
||||
.PHONY: clean build-backend build-validator build-combobulator build-frontend build image-backend image-validator image-combobulator image-frontend docker-backend docker-validator docker-combobulator docker-frontend docker
|
||||
.PHONY: clean build-backend build-validator build-frontend build image-backend image-validator image-frontend docker-backend docker-validator docker-frontend docker
|
||||
|
||||
@@ -26,6 +26,7 @@ func main() {
|
||||
app.Commands = []*cli.Command{
|
||||
cmds.NewServeCommand(),
|
||||
cmds.NewApiCommand(),
|
||||
cmds.NewAORCommand(),
|
||||
}
|
||||
|
||||
if err := app.Run(os.Args); err != nil {
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
[package]
|
||||
name = "maps-combobulator"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
async-nats.workspace = true
|
||||
aws-config = { version = "1", features = ["behavior-version-latest"] }
|
||||
aws-sdk-s3 = "1"
|
||||
map-tool = { version = "3.0.0", registry = "strafesnet", features = ["roblox"], default-features = false }
|
||||
rbx_asset.workspace = true
|
||||
rbx_binary.workspace = true
|
||||
rbx_dom_weak.workspace = true
|
||||
rbxassetid = { version = "0.1.0", registry = "strafesnet" }
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
strafesnet_deferred_loader = { version = "0.6.0", registry = "strafesnet" }
|
||||
strafesnet_rbx_loader = { version = "0.10.0", registry = "strafesnet" }
|
||||
strafesnet_snf = { version = "0.3.2", registry = "strafesnet" }
|
||||
tokio.workspace = true
|
||||
tokio-stream = "0.1"
|
||||
@@ -1,4 +0,0 @@
|
||||
FROM debian:trixie-slim AS runtime
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends libssl3t64 ca-certificates && rm -rf /var/lib/apt/lists/*
|
||||
COPY /target/release/maps-combobulator /
|
||||
ENTRYPOINT ["/maps-combobulator"]
|
||||
@@ -1,152 +0,0 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use rbxassetid::{RobloxAssetId,RobloxAssetIdParseErr};
|
||||
use strafesnet_deferred_loader::{loader::Loader,texture::Texture};
|
||||
|
||||
use strafesnet_rbx_loader::mesh::{MeshIndex,MeshType,MeshWithSize};
|
||||
|
||||
// disallow non-static lifetimes
|
||||
fn static_ustr(s:&'static str)->rbx_dom_weak::Ustr{
|
||||
rbx_dom_weak::ustr(s)
|
||||
}
|
||||
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum TextureError{
|
||||
NoTexture,
|
||||
RobloxAssetIdParse(RobloxAssetIdParseErr),
|
||||
}
|
||||
impl std::fmt::Display for TextureError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for TextureError{}
|
||||
impl From<RobloxAssetIdParseErr> for TextureError{
|
||||
fn from(value:RobloxAssetIdParseErr)->Self{
|
||||
Self::RobloxAssetIdParse(value)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct TextureLoader{
|
||||
textures:HashMap<RobloxAssetId,Texture>,
|
||||
}
|
||||
impl TextureLoader{
|
||||
pub fn new()->Self{
|
||||
Self{
|
||||
textures:HashMap::new(),
|
||||
}
|
||||
}
|
||||
pub fn insert(&mut self,asset_id:RobloxAssetId,texture:Vec<u8>){
|
||||
self.textures.insert(asset_id,Texture::ImageDDS(texture));
|
||||
}
|
||||
}
|
||||
impl Loader for TextureLoader{
|
||||
type Error=TextureError;
|
||||
type Index<'a>=&'a str;
|
||||
type Resource=Texture;
|
||||
fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>{
|
||||
let asset_id:RobloxAssetId=index.parse()?;
|
||||
let data=self.textures.get(&asset_id).ok_or(TextureError::NoTexture)?.clone();
|
||||
Ok(data)
|
||||
}
|
||||
}
|
||||
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum MeshError{
|
||||
NoMesh,
|
||||
RobloxAssetIdParse(RobloxAssetIdParseErr),
|
||||
Mesh(strafesnet_rbx_loader::mesh::Error),
|
||||
Union(strafesnet_rbx_loader::union::Error),
|
||||
DecodeBinary(rbx_binary::DecodeError),
|
||||
OneChildPolicy,
|
||||
MissingInstance,
|
||||
}
|
||||
impl std::fmt::Display for MeshError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for MeshError{}
|
||||
impl From<RobloxAssetIdParseErr> for MeshError{
|
||||
fn from(value:RobloxAssetIdParseErr)->Self{
|
||||
Self::RobloxAssetIdParse(value)
|
||||
}
|
||||
}
|
||||
impl From<strafesnet_rbx_loader::mesh::Error> for MeshError{
|
||||
fn from(value:strafesnet_rbx_loader::mesh::Error)->Self{
|
||||
Self::Mesh(value)
|
||||
}
|
||||
}
|
||||
impl From<strafesnet_rbx_loader::union::Error> for MeshError{
|
||||
fn from(value:strafesnet_rbx_loader::union::Error)->Self{
|
||||
Self::Union(value)
|
||||
}
|
||||
}
|
||||
impl From<rbx_binary::DecodeError> for MeshError{
|
||||
fn from(value:rbx_binary::DecodeError)->Self{
|
||||
Self::DecodeBinary(value)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct MeshLoader{
|
||||
meshes:HashMap<RobloxAssetId,MeshWithSize>,
|
||||
unions:HashMap<RobloxAssetId,rbx_dom_weak::WeakDom>,
|
||||
}
|
||||
impl MeshLoader{
|
||||
pub fn new()->Self{
|
||||
Self{
|
||||
meshes:HashMap::new(),
|
||||
unions:HashMap::new(),
|
||||
}
|
||||
}
|
||||
pub fn insert_mesh(&mut self,asset_id:RobloxAssetId,mesh:MeshWithSize){
|
||||
self.meshes.insert(asset_id,mesh);
|
||||
}
|
||||
pub fn insert_union(&mut self,asset_id:RobloxAssetId,union:rbx_dom_weak::WeakDom){
|
||||
self.unions.insert(asset_id,union);
|
||||
}
|
||||
}
|
||||
impl Loader for MeshLoader{
|
||||
type Error=MeshError;
|
||||
type Index<'a>=MeshIndex<'a>;
|
||||
type Resource=MeshWithSize;
|
||||
fn load(&mut self,index:Self::Index<'_>)->Result<Self::Resource,Self::Error>{
|
||||
let mesh=match index.mesh_type{
|
||||
MeshType::FileMesh=>{
|
||||
let id:RobloxAssetId=index.content.parse()?;
|
||||
let mesh_with_size=self.meshes.get(&id).ok_or(MeshError::NoMesh)?;
|
||||
mesh_with_size.clone()
|
||||
},
|
||||
MeshType::Union{mut physics_data,mut mesh_data,size_float_bits,part_texture_description}=>{
|
||||
// decode asset
|
||||
let size=size_float_bits.map(f32::from_bits).into();
|
||||
if !index.content.is_empty()&&(physics_data.is_empty()||mesh_data.is_empty()){
|
||||
let id:RobloxAssetId=index.content.parse()?;
|
||||
let dom=self.unions.get(&id).ok_or(MeshError::NoMesh)?;
|
||||
let &[referent]=dom.root().children()else{
|
||||
return Err(MeshError::OneChildPolicy);
|
||||
};
|
||||
let Some(instance)=dom.get_by_ref(referent)else{
|
||||
return Err(MeshError::MissingInstance);
|
||||
};
|
||||
if physics_data.is_empty(){
|
||||
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=instance.properties.get(&static_ustr("PhysicsData")){
|
||||
physics_data=data.as_ref();
|
||||
}
|
||||
}
|
||||
if mesh_data.is_empty(){
|
||||
if let Some(rbx_dom_weak::types::Variant::BinaryString(data))=instance.properties.get(&static_ustr("MeshData")){
|
||||
mesh_data=data.as_ref();
|
||||
}
|
||||
}
|
||||
strafesnet_rbx_loader::union::convert(physics_data,mesh_data,size,part_texture_description)?
|
||||
}else{
|
||||
strafesnet_rbx_loader::union::convert(physics_data,mesh_data,size,part_texture_description)?
|
||||
}
|
||||
},
|
||||
};
|
||||
Ok(mesh)
|
||||
}
|
||||
}
|
||||
@@ -1,165 +0,0 @@
|
||||
use tokio_stream::StreamExt;
|
||||
|
||||
mod loader;
|
||||
mod nats_types;
|
||||
mod process;
|
||||
mod s3;
|
||||
|
||||
const SUBJECT_MAPFIX_RELEASE:&str="maptest.mapfixes.release";
|
||||
const SUBJECT_SUBMISSION_BATCHRELEASE:&str="maptest.submissions.batchrelease";
|
||||
const SUBJECT_SEED:&str="maptest.combobulator.seed";
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum StartupError{
|
||||
NatsConnect(async_nats::ConnectError),
|
||||
NatsGetStream(async_nats::jetstream::context::GetStreamError),
|
||||
NatsConsumer(async_nats::jetstream::stream::ConsumerError),
|
||||
NatsConsumerUpdate(async_nats::jetstream::stream::ConsumerUpdateError),
|
||||
NatsStream(async_nats::jetstream::consumer::StreamError),
|
||||
}
|
||||
impl std::fmt::Display for StartupError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for StartupError{}
|
||||
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
enum HandleMessageError{
|
||||
Json(serde_json::Error),
|
||||
UnknownSubject(String),
|
||||
Process(process::Error),
|
||||
Ack(async_nats::Error),
|
||||
Publish(async_nats::jetstream::context::PublishError),
|
||||
}
|
||||
impl std::fmt::Display for HandleMessageError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for HandleMessageError{}
|
||||
|
||||
fn from_slice<'a,T:serde::de::Deserialize<'a>>(slice:&'a [u8])->Result<T,HandleMessageError>{
|
||||
serde_json::from_slice(slice).map_err(HandleMessageError::Json)
|
||||
}
|
||||
|
||||
async fn handle_message(
|
||||
processor:&process::Processor,
|
||||
jetstream:&async_nats::jetstream::Context,
|
||||
message:async_nats::jetstream::Message,
|
||||
)->Result<(),HandleMessageError>{
|
||||
match message.subject.as_str(){
|
||||
SUBJECT_MAPFIX_RELEASE=>{
|
||||
let request:nats_types::ReleaseMapfixRequest=from_slice(&message.payload)?;
|
||||
processor.handle_mapfix_release(request).await.map_err(HandleMessageError::Process)?;
|
||||
message.ack().await.map_err(HandleMessageError::Ack)?;
|
||||
},
|
||||
SUBJECT_SUBMISSION_BATCHRELEASE=>{
|
||||
// split batch into individual seed messages
|
||||
let batch:nats_types::ReleaseSubmissionsBatchRequest=from_slice(&message.payload)?;
|
||||
println!("[combobulator] Splitting batch release (operation {}, {} submissions)",
|
||||
batch.OperationID,batch.Submissions.len());
|
||||
for submission in batch.Submissions{
|
||||
let seed=nats_types::SeedCombobulatorRequest{AssetID:submission.UploadedAssetID};
|
||||
let payload=serde_json::to_vec(&seed).map_err(HandleMessageError::Json)?;
|
||||
jetstream.publish(SUBJECT_SEED,payload.into())
|
||||
.await.map_err(HandleMessageError::Publish)?;
|
||||
println!("[combobulator] Queued seed for asset {}",seed.AssetID);
|
||||
}
|
||||
message.ack().await.map_err(HandleMessageError::Ack)?;
|
||||
},
|
||||
SUBJECT_SEED=>{
|
||||
let request:nats_types::SeedCombobulatorRequest=from_slice(&message.payload)?;
|
||||
processor.handle_seed(request).await.map_err(HandleMessageError::Process)?;
|
||||
message.ack().await.map_err(HandleMessageError::Ack)?;
|
||||
},
|
||||
other=>return Err(HandleMessageError::UnknownSubject(other.to_owned())),
|
||||
}
|
||||
|
||||
println!("[combobulator] Message processed and acked");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main()->Result<(),StartupError>{
|
||||
// roblox cookie api for downloading assets
|
||||
let cookie=std::env::var("RBXCOOKIE").expect("RBXCOOKIE env required");
|
||||
let cookie_context=rbx_asset::cookie::Context::new(rbx_asset::cookie::Cookie::new(cookie));
|
||||
|
||||
// s3
|
||||
let s3_bucket=std::env::var("S3_BUCKET").expect("S3_BUCKET env required");
|
||||
let s3_config=aws_config::load_defaults(aws_config::BehaviorVersion::latest()).await;
|
||||
let s3_client=aws_sdk_s3::Client::new(&s3_config);
|
||||
let s3_cache=s3::S3Cache::new(s3_client,s3_bucket);
|
||||
|
||||
let processor=process::Processor{
|
||||
cookie_context,
|
||||
s3:s3_cache,
|
||||
};
|
||||
|
||||
// nats
|
||||
let nats_host=std::env::var("NATS_HOST").expect("NATS_HOST env required");
|
||||
|
||||
const STREAM_NAME:&str="maptest";
|
||||
const DURABLE_NAME:&str="combobulator";
|
||||
|
||||
let filter_subjects=vec![
|
||||
SUBJECT_MAPFIX_RELEASE.to_owned(),
|
||||
SUBJECT_SUBMISSION_BATCHRELEASE.to_owned(),
|
||||
SUBJECT_SEED.to_owned(),
|
||||
];
|
||||
|
||||
let nats_config=async_nats::jetstream::consumer::pull::Config{
|
||||
name:Some(DURABLE_NAME.to_owned()),
|
||||
durable_name:Some(DURABLE_NAME.to_owned()),
|
||||
filter_subjects:filter_subjects.clone(),
|
||||
ack_wait:std::time::Duration::from_secs(900), // 15 minutes for processing
|
||||
max_deliver:5, // retry up to 5 times
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let nasty=async_nats::connect(nats_host).await.map_err(StartupError::NatsConnect)?;
|
||||
let jetstream=async_nats::jetstream::new(nasty);
|
||||
let stream=jetstream.get_stream(STREAM_NAME).await.map_err(StartupError::NatsGetStream)?;
|
||||
let consumer=stream.get_or_create_consumer(DURABLE_NAME,nats_config.clone()).await.map_err(StartupError::NatsConsumer)?;
|
||||
|
||||
// update consumer config if filter subjects changed
|
||||
if consumer.cached_info().config.filter_subjects!=filter_subjects{
|
||||
stream.update_consumer(nats_config).await.map_err(StartupError::NatsConsumerUpdate)?;
|
||||
}
|
||||
|
||||
let mut messages=consumer.messages().await.map_err(StartupError::NatsStream)?;
|
||||
|
||||
// SIGTERM graceful shutdown
|
||||
let mut sig_term=tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate())
|
||||
.expect("Failed to create SIGTERM signal listener");
|
||||
|
||||
println!("[combobulator] Started, waiting for messages...");
|
||||
|
||||
// sequential processing loop - one message at a time
|
||||
let main_loop=async{
|
||||
while let Some(message_result)=messages.next().await{
|
||||
match message_result{
|
||||
Ok(message)=>{
|
||||
match handle_message(&processor,&jetstream,message).await{
|
||||
Ok(())=>println!("[combobulator] Success"),
|
||||
Err(e)=>println!("[combobulator] Error: {e}"),
|
||||
}
|
||||
},
|
||||
Err(e)=>println!("[combobulator] Message stream error: {e}"),
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
tokio::select!{
|
||||
_=sig_term.recv()=>{
|
||||
println!("[combobulator] Received SIGTERM, shutting down");
|
||||
},
|
||||
_=main_loop=>{
|
||||
println!("[combobulator] Message stream ended");
|
||||
},
|
||||
};
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -1,35 +0,0 @@
|
||||
#[expect(nonstandard_style,dead_code)]
|
||||
#[derive(serde::Deserialize)]
|
||||
pub struct ReleaseMapfixRequest{
|
||||
pub MapfixID:u64,
|
||||
pub ModelID:u64,
|
||||
pub ModelVersion:u64,
|
||||
pub TargetAssetID:u64,
|
||||
}
|
||||
|
||||
#[expect(nonstandard_style,dead_code)]
|
||||
#[derive(serde::Deserialize)]
|
||||
pub struct ReleaseSubmissionRequest{
|
||||
pub SubmissionID:u64,
|
||||
pub ReleaseDate:i64,
|
||||
pub ModelID:u64,
|
||||
pub ModelVersion:u64,
|
||||
pub UploadedAssetID:u64,
|
||||
pub DisplayName:String,
|
||||
pub Creator:String,
|
||||
pub GameID:u32,
|
||||
pub Submitter:u64,
|
||||
}
|
||||
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(serde::Deserialize)]
|
||||
pub struct ReleaseSubmissionsBatchRequest{
|
||||
pub Submissions:Vec<ReleaseSubmissionRequest>,
|
||||
pub OperationID:u32,
|
||||
}
|
||||
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(serde::Deserialize,serde::Serialize)]
|
||||
pub struct SeedCombobulatorRequest{
|
||||
pub AssetID:u64,
|
||||
}
|
||||
@@ -1,236 +0,0 @@
|
||||
use std::io::Cursor;
|
||||
|
||||
use crate::nats_types::ReleaseMapfixRequest;
|
||||
use crate::s3::S3Cache;
|
||||
|
||||
use strafesnet_deferred_loader::deferred_loader::LoadFailureMode;
|
||||
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum ConvertError{
|
||||
IO(std::io::Error),
|
||||
SNFMap(strafesnet_snf::map::Error),
|
||||
RobloxLoadMesh(super::loader::MeshError),
|
||||
RobloxLoadTexture(super::loader::TextureError),
|
||||
}
|
||||
impl std::fmt::Display for ConvertError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for ConvertError{}
|
||||
|
||||
pub fn convert_to_snf(
|
||||
dom:rbx_dom_weak::WeakDom,
|
||||
mut mesh_loader:crate::loader::MeshLoader,
|
||||
mut texture_loader:crate::loader::TextureLoader,
|
||||
)->Result<Vec<u8>,ConvertError>{
|
||||
const FAILURE_MODE:LoadFailureMode=LoadFailureMode::DefaultToNone;
|
||||
|
||||
// run scripts
|
||||
let model=strafesnet_rbx_loader::Model::new(dom);
|
||||
|
||||
let mut place=strafesnet_rbx_loader::Place::from(model);
|
||||
// TODO: script errors report for burn down chart
|
||||
let _script_errors=place.run_scripts().unwrap_or_else(|e|vec![e]);
|
||||
|
||||
// convert
|
||||
let mut texture_deferred_loader=strafesnet_deferred_loader::deferred_loader::RenderConfigDeferredLoader::new();
|
||||
let mut mesh_deferred_loader=strafesnet_deferred_loader::deferred_loader::MeshDeferredLoader::new();
|
||||
|
||||
let map_step1=strafesnet_rbx_loader::rbx::convert(
|
||||
place.as_ref(),
|
||||
&mut texture_deferred_loader,
|
||||
&mut mesh_deferred_loader,
|
||||
);
|
||||
|
||||
let meshpart_meshes=mesh_deferred_loader.into_meshes(&mut mesh_loader,FAILURE_MODE).map_err(ConvertError::RobloxLoadMesh)?;
|
||||
|
||||
let map_step2=map_step1.add_meshpart_meshes_and_calculate_attributes(meshpart_meshes);
|
||||
|
||||
let render_configs=texture_deferred_loader.into_render_configs(&mut texture_loader,FAILURE_MODE).map_err(ConvertError::RobloxLoadTexture)?;
|
||||
|
||||
// TODO: conversion error report for burn down chart
|
||||
let (map,_convert_errors)=map_step2.add_render_configs_and_textures(render_configs);
|
||||
|
||||
let mut snf_buf=Vec::new();
|
||||
strafesnet_snf::map::write_map(Cursor::new(&mut snf_buf),map).map_err(ConvertError::SNFMap)?;
|
||||
|
||||
Ok(snf_buf)
|
||||
}
|
||||
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
ArchivedModel,
|
||||
LoadDom(map_tool::roblox::LoadDomError),
|
||||
DownloadAsset(map_tool::roblox::DownloadAssetError),
|
||||
ConvertTexture(map_tool::roblox::ConvertTextureError),
|
||||
Union(rbx_binary::DecodeError),
|
||||
Mesh(strafesnet_rbx_loader::mesh::Error),
|
||||
ConvertSnf(ConvertError),
|
||||
S3Get(crate::s3::GetError),
|
||||
S3Put(crate::s3::PutError),
|
||||
}
|
||||
impl std::fmt::Display for Error{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for Error{}
|
||||
|
||||
pub struct Processor{
|
||||
pub cookie_context:rbx_asset::cookie::Context,
|
||||
pub s3:S3Cache,
|
||||
}
|
||||
|
||||
impl Processor{
|
||||
/// Download an asset, returning None if the asset is archived.
|
||||
async fn download_asset(&self,asset_id:u64)->Result<Option<Vec<u8>>,Error>{
|
||||
match map_tool::roblox::download_asset(&self.cookie_context,asset_id).await{
|
||||
Ok(data)=>Ok(Some(data)),
|
||||
Err(e)=>{
|
||||
let s=format!("{e:?}");
|
||||
if s.contains("Requested asset is archived"){
|
||||
println!("[combobulator] Asset {asset_id} is archived, skipping");
|
||||
Ok(None)
|
||||
}else if s.contains("Asset is not approved for the requester"){
|
||||
println!("[combobulator] Asset {asset_id} is not approved for the requester, skipping");
|
||||
Ok(None)
|
||||
}else if s.contains("Request asset was not found"){
|
||||
println!("[combobulator] Asset {asset_id} was not found, skipping");
|
||||
Ok(None)
|
||||
}else{
|
||||
Err(Error::DownloadAsset(e))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Process a single model: extract assets, cache to S3, build SNF.
|
||||
async fn process_model(&self,asset_id:u64)->Result<(),Error>{
|
||||
println!("[combobulator] Downloading model {asset_id}");
|
||||
let rbxl_bytes=self.download_asset(asset_id).await?
|
||||
.ok_or(Error::ArchivedModel)?;
|
||||
|
||||
// decode dom
|
||||
let dom=map_tool::roblox::load_dom(&rbxl_bytes)
|
||||
.map_err(Error::LoadDom)?;
|
||||
|
||||
// extract unique assets from the file
|
||||
let assets=map_tool::roblox::get_unique_assets(&dom);
|
||||
|
||||
// place textures into 'loader'
|
||||
let mut texture_loader=crate::loader::TextureLoader::new();
|
||||
|
||||
// process textures: download, cache, convert to DDS
|
||||
for &id in &assets.textures{
|
||||
let asset_id=id.0;
|
||||
let dds_key=S3Cache::texture_dds_key(asset_id);
|
||||
|
||||
// fetch cached DDS
|
||||
let dds=if let Some(dds)=self.s3.get(&dds_key).await.map_err(Error::S3Get)?{
|
||||
dds
|
||||
}else{
|
||||
// check raw cache, download if missing
|
||||
let raw_key=S3Cache::texture_raw_key(asset_id);
|
||||
let dds_result=if let Some(data)=self.s3.get(&raw_key).await.map_err(Error::S3Get)?{
|
||||
map_tool::roblox::convert_texture_to_dds(&data)
|
||||
}else{
|
||||
println!("[combobulator] Downloading texture {asset_id}");
|
||||
let Some(data)=self.download_asset(asset_id).await? else{continue};
|
||||
|
||||
// decode while we have ownership
|
||||
let dds_result=map_tool::roblox::convert_texture_to_dds(&data);
|
||||
|
||||
self.s3.put(&raw_key,data).await.map_err(Error::S3Put)?;
|
||||
dds_result
|
||||
};
|
||||
|
||||
// handle error after caching data
|
||||
let dds=dds_result.map_err(Error::ConvertTexture)?;
|
||||
|
||||
self.s3.put(&dds_key,dds.clone()).await.map_err(Error::S3Put)?;
|
||||
|
||||
dds
|
||||
};
|
||||
println!("[combobulator] Texture {asset_id} processed");
|
||||
|
||||
texture_loader.insert(id,dds);
|
||||
}
|
||||
|
||||
let mut mesh_loader=crate::loader::MeshLoader::new();
|
||||
// process meshes
|
||||
for &id in &assets.meshes{
|
||||
let asset_id=id.0;
|
||||
let mesh_key=S3Cache::mesh_key(asset_id);
|
||||
|
||||
let mesh_result=if let Some(data)=self.s3.get(&mesh_key).await.map_err(Error::S3Get)?{
|
||||
strafesnet_rbx_loader::mesh::convert(&data)
|
||||
}else{
|
||||
println!("[combobulator] Downloading mesh {asset_id}");
|
||||
let Some(data)=self.download_asset(asset_id).await? else{continue};
|
||||
|
||||
// decode while we have ownership
|
||||
let mesh_result=strafesnet_rbx_loader::mesh::convert(&data);
|
||||
|
||||
self.s3.put(&mesh_key,data.clone()).await.map_err(Error::S3Put)?;
|
||||
mesh_result
|
||||
};
|
||||
println!("[combobulator] Mesh {asset_id} processed");
|
||||
|
||||
// handle error after caching data
|
||||
let mesh=mesh_result.map_err(Error::Mesh)?;
|
||||
|
||||
mesh_loader.insert_mesh(id,mesh);
|
||||
}
|
||||
|
||||
// process unions
|
||||
for &id in &assets.unions{
|
||||
let asset_id=id.0;
|
||||
let union_key=S3Cache::union_key(asset_id);
|
||||
|
||||
let union_result=if let Some(data)=self.s3.get(&union_key).await.map_err(Error::S3Get)?{
|
||||
rbx_binary::from_reader(data.as_slice())
|
||||
}else{
|
||||
println!("[combobulator] Downloading union {asset_id}");
|
||||
let Some(data)=self.download_asset(asset_id).await? else{continue};
|
||||
|
||||
// decode the data while we have ownership
|
||||
let union_result=rbx_binary::from_reader(data.as_slice());
|
||||
|
||||
self.s3.put(&union_key,data).await.map_err(Error::S3Put)?;
|
||||
union_result
|
||||
};
|
||||
println!("[combobulator] Union {asset_id} processed");
|
||||
|
||||
// handle error after caching data
|
||||
let union=union_result.map_err(Error::Union)?;
|
||||
|
||||
mesh_loader.insert_union(id,union);
|
||||
}
|
||||
|
||||
// convert to SNF and upload
|
||||
println!("[combobulator] Converting to SNF");
|
||||
let snf=convert_to_snf(dom,mesh_loader,texture_loader)
|
||||
.map_err(Error::ConvertSnf)?;
|
||||
let snf_key=S3Cache::snf_key(asset_id);
|
||||
self.s3.put(&snf_key,snf).await.map_err(Error::S3Put)?;
|
||||
println!("[combobulator] SNF uploaded to {snf_key}");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Handle a mapfix release message.
|
||||
pub async fn handle_mapfix_release(&self,request:ReleaseMapfixRequest)->Result<(),Error>{
|
||||
println!("[combobulator] Processing mapfix {} (asset {})",
|
||||
request.MapfixID,request.TargetAssetID);
|
||||
self.process_model(request.TargetAssetID).await
|
||||
}
|
||||
|
||||
/// Handle a seed request (reprocess an existing map).
|
||||
pub async fn handle_seed(&self,request:crate::nats_types::SeedCombobulatorRequest)->Result<(),Error>{
|
||||
println!("[combobulator] Seeding asset {}",request.AssetID);
|
||||
self.process_model(request.AssetID).await
|
||||
}
|
||||
}
|
||||
@@ -1,96 +0,0 @@
|
||||
use aws_sdk_s3::Client;
|
||||
use aws_sdk_s3::primitives::ByteStream;
|
||||
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum GetError{
|
||||
Get(aws_sdk_s3::error::SdkError<aws_sdk_s3::operation::get_object::GetObjectError>),
|
||||
Collect(aws_sdk_s3::primitives::ByteStreamError),
|
||||
}
|
||||
impl std::fmt::Display for GetError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for GetError{}
|
||||
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum PutError{
|
||||
Put(aws_sdk_s3::error::SdkError<aws_sdk_s3::operation::put_object::PutObjectError>),
|
||||
}
|
||||
impl std::fmt::Display for PutError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for PutError{}
|
||||
|
||||
pub struct S3Cache{
|
||||
client:Client,
|
||||
bucket:String,
|
||||
}
|
||||
|
||||
impl S3Cache{
|
||||
pub fn new(client:Client,bucket:String)->Self{
|
||||
Self{client,bucket}
|
||||
}
|
||||
|
||||
/// Try to get a cached object. Returns None if the key doesn't exist.
|
||||
pub async fn get(&self,key:&str)->Result<Option<Vec<u8>>,GetError>{
|
||||
match self.client.get_object()
|
||||
.bucket(&self.bucket)
|
||||
.key(key)
|
||||
.send()
|
||||
.await
|
||||
{
|
||||
Ok(output)=>{
|
||||
let bytes=output.body.collect().await.map_err(GetError::Collect)?;
|
||||
Ok(Some(bytes.to_vec()))
|
||||
},
|
||||
Err(e)=>{
|
||||
// check if it's a NoSuchKey error
|
||||
if let aws_sdk_s3::error::SdkError::ServiceError(ref service_err)=e{
|
||||
if service_err.err().is_no_such_key(){
|
||||
return Ok(None);
|
||||
}
|
||||
}
|
||||
Err(GetError::Get(e))
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/// Put an object into S3.
|
||||
pub async fn put(&self,key:&str,data:Vec<u8>)->Result<(),PutError>{
|
||||
self.client.put_object()
|
||||
.bucket(&self.bucket)
|
||||
.key(key)
|
||||
.body(ByteStream::from(data))
|
||||
.send()
|
||||
.await
|
||||
.map_err(PutError::Put)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// S3 key helpers
|
||||
|
||||
pub fn texture_raw_key(asset_id:u64)->String{
|
||||
format!("assets/textures/{asset_id}.raw")
|
||||
}
|
||||
|
||||
pub fn texture_dds_key(asset_id:u64)->String{
|
||||
format!("assets/textures/{asset_id}.dds")
|
||||
}
|
||||
|
||||
pub fn mesh_key(asset_id:u64)->String{
|
||||
format!("assets/meshes/{asset_id}")
|
||||
}
|
||||
|
||||
pub fn union_key(asset_id:u64)->String{
|
||||
format!("assets/unions/{asset_id}")
|
||||
}
|
||||
|
||||
pub fn snf_key(model_id:u64)->String{
|
||||
format!("maps/{model_id}.snfm")
|
||||
}
|
||||
}
|
||||
40
docs/docs.go
40
docs/docs.go
@@ -115,46 +115,6 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/map/{id}/snfm": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"ApiKeyAuth": []
|
||||
}
|
||||
],
|
||||
"description": "Redirects to a signed download URL for a map's SNFM file",
|
||||
"tags": [
|
||||
"maps"
|
||||
],
|
||||
"summary": "Download SNFM file",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "integer",
|
||||
"description": "Map ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"307": {
|
||||
"description": "Redirect to signed S3 URL"
|
||||
},
|
||||
"404": {
|
||||
"description": "Map not found",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/Error"
|
||||
}
|
||||
},
|
||||
"default": {
|
||||
"description": "General error response",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/Error"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"definitions": {
|
||||
|
||||
@@ -108,46 +108,6 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/map/{id}/snfm": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"ApiKeyAuth": []
|
||||
}
|
||||
],
|
||||
"description": "Redirects to a signed download URL for a map's SNFM file",
|
||||
"tags": [
|
||||
"maps"
|
||||
],
|
||||
"summary": "Download SNFM file",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "integer",
|
||||
"description": "Map ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"307": {
|
||||
"description": "Redirect to signed S3 URL"
|
||||
},
|
||||
"404": {
|
||||
"description": "Map not found",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/Error"
|
||||
}
|
||||
},
|
||||
"default": {
|
||||
"description": "General error response",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/Error"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"definitions": {
|
||||
|
||||
@@ -133,31 +133,6 @@ paths:
|
||||
summary: Get map by ID
|
||||
tags:
|
||||
- maps
|
||||
/map/{id}/snfm:
|
||||
get:
|
||||
description: Redirects to a signed download URL for a map's SNFM file
|
||||
parameters:
|
||||
- description: Map ID
|
||||
in: path
|
||||
name: id
|
||||
required: true
|
||||
type: integer
|
||||
responses:
|
||||
"307":
|
||||
description: Redirect to signed S3 URL
|
||||
"404":
|
||||
description: Map not found
|
||||
schema:
|
||||
$ref: '#/definitions/Error'
|
||||
default:
|
||||
description: General error response
|
||||
schema:
|
||||
$ref: '#/definitions/Error'
|
||||
security:
|
||||
- ApiKeyAuth: []
|
||||
summary: Download SNFM file
|
||||
tags:
|
||||
- maps
|
||||
securityDefinitions:
|
||||
ApiKeyAuth:
|
||||
in: header
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
package main
|
||||
|
||||
//go:generate go run github.com/swaggo/swag/cmd/swag@latest init -g ./cmd/maps-service/service.go
|
||||
//go:generate swag init -g ./cmd/maps-service/service.go
|
||||
//go:generate go run github.com/ogen-go/ogen/cmd/ogen@latest --target pkg/api --clean openapi.yaml
|
||||
|
||||
27
go.mod
27
go.mod
@@ -6,7 +6,7 @@ toolchain go1.24.5
|
||||
|
||||
require (
|
||||
git.itzana.me/StrafesNET/dev-service v0.0.0-20250628052121-92af8193b5ed
|
||||
git.itzana.me/strafesnet/go-grpc v0.0.0-20260301211036-f2db3cb46e8c
|
||||
git.itzana.me/strafesnet/go-grpc v0.0.0-20250815013325-1c84f73bdcb1
|
||||
git.itzana.me/strafesnet/utils v0.0.0-20220716194944-d8ca164052f9
|
||||
github.com/dchest/siphash v1.2.3
|
||||
github.com/gin-gonic/gin v1.10.1
|
||||
@@ -20,9 +20,9 @@ require (
|
||||
github.com/swaggo/gin-swagger v1.6.0
|
||||
github.com/swaggo/swag v1.16.6
|
||||
github.com/urfave/cli/v2 v2.27.6
|
||||
go.opentelemetry.io/otel v1.40.0
|
||||
go.opentelemetry.io/otel/metric v1.40.0
|
||||
go.opentelemetry.io/otel/trace v1.40.0
|
||||
go.opentelemetry.io/otel v1.39.0
|
||||
go.opentelemetry.io/otel/metric v1.39.0
|
||||
go.opentelemetry.io/otel/trace v1.39.0
|
||||
google.golang.org/grpc v1.48.0
|
||||
gorm.io/driver/postgres v1.6.0
|
||||
gorm.io/gorm v1.25.12
|
||||
@@ -32,25 +32,6 @@ require (
|
||||
github.com/KyleBanks/depth v1.2.1 // indirect
|
||||
github.com/PuerkitoBio/purell v1.1.1 // indirect
|
||||
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect
|
||||
github.com/aws/aws-sdk-go-v2 v1.41.2 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.5 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/config v1.32.10 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.19.10 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.18 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.18 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.18 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.18 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.5 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.10 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.18 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.18 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/s3 v1.96.2 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/signin v1.0.6 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/sso v1.30.11 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.15 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.41.7 // indirect
|
||||
github.com/aws/smithy-go v1.24.1 // indirect
|
||||
github.com/bytedance/sonic v1.11.6 // indirect
|
||||
github.com/bytedance/sonic/loader v0.1.1 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
|
||||
86
go.sum
86
go.sum
@@ -2,12 +2,8 @@ cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMT
|
||||
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
git.itzana.me/StrafesNET/dev-service v0.0.0-20250628052121-92af8193b5ed h1:eGWIQx2AOrSsLC2dieuSs8MCliRE60tvpZnmxsTBtKc=
|
||||
git.itzana.me/StrafesNET/dev-service v0.0.0-20250628052121-92af8193b5ed/go.mod h1:KJal0K++M6HEzSry6JJ2iDPZtOQn5zSstNlDbU3X4Jg=
|
||||
git.itzana.me/strafesnet/go-grpc v0.0.0-20251228204118-c20dbb42afec h1:JSar9If1kzb02+Erp+zmSqHKWPPP2NqMQVK15pRmkLE=
|
||||
git.itzana.me/strafesnet/go-grpc v0.0.0-20251228204118-c20dbb42afec/go.mod h1:X7XTRUScRkBWq8q8bplbeso105RPDlnY7J6Wy1IwBMs=
|
||||
git.itzana.me/strafesnet/go-grpc v0.0.0-20260301210537-0bea64387f6d h1:I73hWqmIcsSH90VHjwsg50v6emQkM0IAA04vb4wktBA=
|
||||
git.itzana.me/strafesnet/go-grpc v0.0.0-20260301210537-0bea64387f6d/go.mod h1:X7XTRUScRkBWq8q8bplbeso105RPDlnY7J6Wy1IwBMs=
|
||||
git.itzana.me/strafesnet/go-grpc v0.0.0-20260301211036-f2db3cb46e8c h1:sI50ymozoI+HFbxg1AOdCeWF6bJgpeP6OrnCvyjuQ9U=
|
||||
git.itzana.me/strafesnet/go-grpc v0.0.0-20260301211036-f2db3cb46e8c/go.mod h1:X7XTRUScRkBWq8q8bplbeso105RPDlnY7J6Wy1IwBMs=
|
||||
git.itzana.me/strafesnet/go-grpc v0.0.0-20250815013325-1c84f73bdcb1 h1:imXibfeYcae6og0TTDUFRQ3CQtstGjIoLbCn+pezD2o=
|
||||
git.itzana.me/strafesnet/go-grpc v0.0.0-20250815013325-1c84f73bdcb1/go.mod h1:X7XTRUScRkBWq8q8bplbeso105RPDlnY7J6Wy1IwBMs=
|
||||
git.itzana.me/strafesnet/utils v0.0.0-20220716194944-d8ca164052f9 h1:7lU6jyR7S7Rhh1dnUp7GyIRHUTBXZagw8F4n4hOyxLw=
|
||||
git.itzana.me/strafesnet/utils v0.0.0-20220716194944-d8ca164052f9/go.mod h1:uyYerSieEt4v0MJCdPLppG0LtJ4Yj035vuTetWGsxjY=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
@@ -18,44 +14,6 @@ github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbt
|
||||
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M=
|
||||
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
|
||||
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
|
||||
github.com/aws/aws-sdk-go-v2 v1.41.2 h1:LuT2rzqNQsauaGkPK/7813XxcZ3o3yePY0Iy891T2ls=
|
||||
github.com/aws/aws-sdk-go-v2 v1.41.2/go.mod h1:IvvlAZQXvTXznUPfRVfryiG1fbzE2NGK6m9u39YQ+S4=
|
||||
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.5 h1:zWFmPmgw4sveAYi1mRqG+E/g0461cJ5M4bJ8/nc6d3Q=
|
||||
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.5/go.mod h1:nVUlMLVV8ycXSb7mSkcNu9e3v/1TJq2RTlrPwhYWr5c=
|
||||
github.com/aws/aws-sdk-go-v2/config v1.32.10 h1:9DMthfO6XWZYLfzZglAgW5Fyou2nRI5CuV44sTedKBI=
|
||||
github.com/aws/aws-sdk-go-v2/config v1.32.10/go.mod h1:2rUIOnA2JaiqYmSKYmRJlcMWy6qTj1vuRFscppSBMcw=
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.19.10 h1:EEhmEUFCE1Yhl7vDhNOI5OCL/iKMdkkYFTRpZXNw7m8=
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.19.10/go.mod h1:RnnlFCAlxQCkN2Q379B67USkBMu1PipEEiibzYN5UTE=
|
||||
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.18 h1:Ii4s+Sq3yDfaMLpjrJsqD6SmG/Wq/P5L/hw2qa78UAY=
|
||||
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.18/go.mod h1:6x81qnY++ovptLE6nWQeWrpXxbnlIex+4H4eYYGcqfc=
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.18 h1:F43zk1vemYIqPAwhjTjYIz0irU2EY7sOb/F5eJ3HuyM=
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.18/go.mod h1:w1jdlZXrGKaJcNoL+Nnrj+k5wlpGXqnNrKoP22HvAug=
|
||||
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.18 h1:xCeWVjj0ki0l3nruoyP2slHsGArMxeiiaoPN5QZH6YQ=
|
||||
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.18/go.mod h1:r/eLGuGCBw6l36ZRWiw6PaZwPXb6YOj+i/7MizNl5/k=
|
||||
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 h1:WKuaxf++XKWlHWu9ECbMlha8WOEGm0OUEZqm4K/Gcfk=
|
||||
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4/go.mod h1:ZWy7j6v1vWGmPReu0iSGvRiise4YI5SkR3OHKTZ6Wuc=
|
||||
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.18 h1:eZioDaZGJ0tMM4gzmkNIO2aAoQd+je7Ug7TkvAzlmkU=
|
||||
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.18/go.mod h1:CCXwUKAJdoWr6/NcxZ+zsiPr6oH/Q5aTooRGYieAyj4=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.5 h1:CeY9LUdur+Dxoeldqoun6y4WtJ3RQtzk0JMP2gfUay0=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.5/go.mod h1:AZLZf2fMaahW5s/wMRciu1sYbdsikT/UHwbUjOdEVTc=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.10 h1:fJvQ5mIBVfKtiyx0AHY6HeWcRX5LGANLpq8SVR+Uazs=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.10/go.mod h1:Kzm5e6OmNH8VMkgK9t+ry5jEih4Y8whqs+1hrkxim1I=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.18 h1:LTRCYFlnnKFlKsyIQxKhJuDuA3ZkrDQMRYm6rXiHlLY=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.18/go.mod h1:XhwkgGG6bHSd00nO/mexWTcTjgd6PjuvWQMqSn2UaEk=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.18 h1:/A/xDuZAVD2BpsS2fftFRo/NoEKQJ8YTnJDEHBy2Gtg=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.18/go.mod h1:hWe9b4f+djUQGmyiGEeOnZv69dtMSgpDRIvNMvuvzvY=
|
||||
github.com/aws/aws-sdk-go-v2/service/s3 v1.96.2 h1:M1A9AjcFwlxTLuf0Faj88L8Iqw0n/AJHjpZTQzMMsSc=
|
||||
github.com/aws/aws-sdk-go-v2/service/s3 v1.96.2/go.mod h1:KsdTV6Q9WKUZm2mNJnUFmIoXfZux91M3sr/a4REX8e0=
|
||||
github.com/aws/aws-sdk-go-v2/service/signin v1.0.6 h1:MzORe+J94I+hYu2a6XmV5yC9huoTv8NRcCrUNedDypQ=
|
||||
github.com/aws/aws-sdk-go-v2/service/signin v1.0.6/go.mod h1:hXzcHLARD7GeWnifd8j9RWqtfIgxj4/cAtIVIK7hg8g=
|
||||
github.com/aws/aws-sdk-go-v2/service/sso v1.30.11 h1:7oGD8KPfBOJGXiCoRKrrrQkbvCp8N++u36hrLMPey6o=
|
||||
github.com/aws/aws-sdk-go-v2/service/sso v1.30.11/go.mod h1:0DO9B5EUJQlIDif+XJRWCljZRKsAFKh3gpFz7UnDtOo=
|
||||
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.15 h1:edCcNp9eGIUDUCrzoCu1jWAXLGFIizeqkdkKgRlJwWc=
|
||||
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.15/go.mod h1:lyRQKED9xWfgkYC/wmmYfv7iVIM68Z5OQ88ZdcV1QbU=
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.41.7 h1:NITQpgo9A5NrDZ57uOWj+abvXSb83BbyggcUBVksN7c=
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.41.7/go.mod h1:sks5UWBhEuWYDPdwlnRFn1w7xWdH29Jcpe+/PJQefEs=
|
||||
github.com/aws/smithy-go v1.24.1 h1:VbyeNfmYkWoxMVpGUAbQumkODcYmfMRfZ8yQiH30SK0=
|
||||
github.com/aws/smithy-go v1.24.1/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0=
|
||||
github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
|
||||
github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c=
|
||||
github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA=
|
||||
@@ -89,6 +47,8 @@ github.com/dchest/siphash v1.2.3 h1:QXwFc8cFOR2dSa/gE6o/HokBMWtLUaNDVd+22aKHeEA=
|
||||
github.com/dchest/siphash v1.2.3/go.mod h1:0NvQU092bT0ipiFN++/rXm69QG9tVxLAlQHIXMPAkHc=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
|
||||
github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI=
|
||||
github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
|
||||
github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZQ=
|
||||
github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
|
||||
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||
@@ -97,6 +57,8 @@ github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1m
|
||||
github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
|
||||
github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE=
|
||||
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
|
||||
github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4=
|
||||
github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI=
|
||||
github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
|
||||
github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
|
||||
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
|
||||
@@ -111,6 +73,8 @@ github.com/gin-gonic/gin v1.10.1 h1:T0ujvqyCSqRopADpgPgiTT63DUQVSfojyME59Ei63pQ=
|
||||
github.com/gin-gonic/gin v1.10.1/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
|
||||
github.com/go-faster/errors v0.7.1 h1:MkJTnDoEdi9pDabt1dpWf7AA8/BaSYZqibYyhZ20AYg=
|
||||
github.com/go-faster/errors v0.7.1/go.mod h1:5ySTjWFiphBs07IKuiL69nxdfd5+fzh1u7FPGZP2quo=
|
||||
github.com/go-faster/jx v1.1.0 h1:ZsW3wD+snOdmTDy9eIVgQdjUpXRRV4rqW8NS3t+20bg=
|
||||
github.com/go-faster/jx v1.1.0/go.mod h1:vKDNikrKoyUmpzaJ0OkIkRQClNHFX/nF3dnTJZb3skg=
|
||||
github.com/go-faster/jx v1.2.0 h1:T2YHJPrFaYu21fJtUxC9GzmluKu8rVIFDwwGBKTDseI=
|
||||
github.com/go-faster/jx v1.2.0/go.mod h1:UWLOVDmMG597a5tBFPLIWJdUxz5/2emOpfsj9Neg0PE=
|
||||
github.com/go-faster/yaml v0.4.6 h1:lOK/EhI04gCpPgPhgt0bChS6bvw7G3WwI8xxVe0sw9I=
|
||||
@@ -188,6 +152,8 @@ github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8Hm
|
||||
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||
github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2eRGjI=
|
||||
github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
|
||||
github.com/klauspost/compress v1.18.1 h1:bcSGx7UbpBqMChDtsF28Lw6v/G94LPrrbMbdC3JH2co=
|
||||
github.com/klauspost/compress v1.18.1/go.mod h1:ZQFFVG+MdnR0P+l6wpXgIL4NTtwiKIdBnrBd8Nrxr+0=
|
||||
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
||||
@@ -207,8 +173,11 @@ github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN
|
||||
github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
|
||||
github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA=
|
||||
github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
|
||||
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
|
||||
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
||||
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
|
||||
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
|
||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
@@ -223,6 +192,8 @@ github.com/nats-io/nkeys v0.4.7/go.mod h1:kqXRgRDPlGy7nGaEDMuYzmiJCIAAWDK0IMBtDm
|
||||
github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw=
|
||||
github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||
github.com/ogen-go/ogen v1.2.1 h1:C5A0lvUMu2wl+eWIxnpXMWnuOJ26a2FyzR1CIC2qG0M=
|
||||
github.com/ogen-go/ogen v1.2.1/go.mod h1:P2zQdEu8UqaVRfD5GEFvl+9q63VjMLvDquq1wVbyInM=
|
||||
github.com/ogen-go/ogen v1.18.0 h1:6RQ7lFBjOeNaUWu4getfqIh4GJbEY4hqKuzDtec/g60=
|
||||
github.com/ogen-go/ogen v1.18.0/go.mod h1:dHFr2Wf6cA7tSxMI+zPC21UR5hAlDw8ZYUkK3PziURY=
|
||||
github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
|
||||
@@ -237,6 +208,8 @@ github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0t
|
||||
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
|
||||
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys=
|
||||
github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
|
||||
github.com/segmentio/asm v1.2.1 h1:DTNbBqs57ioxAD4PrArqftgypG4/qNpXoJx8TVXxPR0=
|
||||
github.com/segmentio/asm v1.2.1/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
|
||||
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
|
||||
@@ -279,21 +252,17 @@ go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ
|
||||
go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
|
||||
go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48=
|
||||
go.opentelemetry.io/otel v1.39.0/go.mod h1:kLlFTywNWrFyEdH0oj2xK0bFYZtHRYUdv1NklR/tgc8=
|
||||
go.opentelemetry.io/otel v1.40.0 h1:oA5YeOcpRTXq6NN7frwmwFR0Cn3RhTVZvXsP4duvCms=
|
||||
go.opentelemetry.io/otel v1.40.0/go.mod h1:IMb+uXZUKkMXdPddhwAHm6UfOwJyh4ct1ybIlV14J0g=
|
||||
go.opentelemetry.io/otel/metric v1.39.0 h1:d1UzonvEZriVfpNKEVmHXbdf909uGTOQjA0HF0Ls5Q0=
|
||||
go.opentelemetry.io/otel/metric v1.39.0/go.mod h1:jrZSWL33sD7bBxg1xjrqyDjnuzTUB0x1nBERXd7Ftcs=
|
||||
go.opentelemetry.io/otel/metric v1.40.0 h1:rcZe317KPftE2rstWIBitCdVp89A2HqjkxR3c11+p9g=
|
||||
go.opentelemetry.io/otel/metric v1.40.0/go.mod h1:ib/crwQH7N3r5kfiBZQbwrTge743UDc7DTFVZrrXnqc=
|
||||
go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI=
|
||||
go.opentelemetry.io/otel/trace v1.39.0/go.mod h1:88w4/PnZSazkGzz/w84VHpQafiU4EtqqlVdxWy+rNOA=
|
||||
go.opentelemetry.io/otel/trace v1.40.0 h1:WA4etStDttCSYuhwvEa8OP8I5EWu24lkOzp+ZYblVjw=
|
||||
go.opentelemetry.io/otel/trace v1.40.0/go.mod h1:zeAhriXecNGP/s2SEG3+Y8X9ujcJOTqQ5RgdEJcawiA=
|
||||
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
|
||||
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
|
||||
go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
|
||||
go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
|
||||
go.uber.org/zap v1.27.1 h1:08RqriUEv8+ArZRYSTXy1LeBScaMpVSTBhCeaZYfMYc=
|
||||
go.uber.org/zap v1.27.1/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
|
||||
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
||||
@@ -302,15 +271,21 @@ golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc=
|
||||
golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
|
||||
golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU=
|
||||
golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20240531132922-fd00a4e0eefc h1:O9NuF4s+E/PvMIy+9IUZB9znFwUIXEWSstNjek6VpVg=
|
||||
golang.org/x/exp v0.0.0-20240531132922-fd00a4e0eefc/go.mod h1:XtvwrStGgqGPLc4cjQfWqZHG1YFdYs6swckp8vpsjnc=
|
||||
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93 h1:fQsdNF2N+/YewlRZiricy4P1iimyPKZ/xwniHj8Q2a0=
|
||||
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93/go.mod h1:EPRbTFwzwjXj9NpYyyrvenVh9Y+GFeEvMNh7Xuz7xgU=
|
||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
|
||||
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA=
|
||||
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI=
|
||||
golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
@@ -326,6 +301,8 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v
|
||||
golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
|
||||
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
|
||||
golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU=
|
||||
golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
@@ -335,6 +312,8 @@ golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJ
|
||||
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw=
|
||||
golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
@@ -350,8 +329,11 @@ golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
|
||||
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk=
|
||||
golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
@@ -362,6 +344,8 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
|
||||
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
|
||||
golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU=
|
||||
golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
@@ -371,6 +355,8 @@ golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3
|
||||
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||
golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
|
||||
golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
|
||||
493
kind-setup.sh
Executable file
493
kind-setup.sh
Executable file
@@ -0,0 +1,493 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Configuration
|
||||
CLUSTER_NAME="${KIND_CLUSTER_NAME:-maps-service-local}"
|
||||
INFRA_PATH="${INFRA_PATH:-$HOME/Documents/Projects/infra}"
|
||||
NAMESPACE="${NAMESPACE:-default}"
|
||||
REGISTRY_NAME="kind-registry"
|
||||
REGISTRY_PORT="5001"
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
# Print an informational message in green.
log_info() {
    echo -e "${GREEN}[INFO]${NC} $1"
}

# Print a warning message in yellow.
log_warn() {
    echo -e "${YELLOW}[WARN]${NC} $1"
}

# Print an error message in red.
log_error() {
    echo -e "${RED}[ERROR]${NC} $1"
}
|
||||
|
||||
# Check dependencies
# Ensure every CLI tool this script relies on is present on PATH;
# abort with an error message on the first missing one.
check_dependencies() {
    log_info "Checking dependencies..."
    local dep
    for dep in kind kubectl docker; do
        if command -v "$dep" &> /dev/null; then
            continue
        fi
        log_error "$dep is not installed. Please install it first."
        exit 1
    done
    log_info "All dependencies are installed"
}
|
||||
|
||||
# Create local container registry
# Idempotently bring up the local Docker registry container:
# reuse it if running, restart it if stopped, create it otherwise.
create_registry() {
    # Already running: nothing to do.
    if [ -n "$(docker ps -q -f name=${REGISTRY_NAME})" ]; then
        log_info "Registry ${REGISTRY_NAME} already running"
        return 0
    fi

    # Exists but stopped: just start the existing container.
    if [ -n "$(docker ps -aq -f name=${REGISTRY_NAME})" ]; then
        log_info "Starting existing registry ${REGISTRY_NAME}"
        docker start ${REGISTRY_NAME}
        return 0
    fi

    # No container at all: create one bound to localhost only.
    log_info "Creating local registry ${REGISTRY_NAME}..."
    docker run -d --restart=always -p "127.0.0.1:${REGISTRY_PORT}:5000" --name "${REGISTRY_NAME}" registry:2
}
|
||||
|
||||
# Create KIND cluster with registry
# Create (or reuse) the KIND cluster, wire it to the local registry,
# and advertise the registry location to in-cluster tooling.
# Prompts interactively before destroying an existing cluster.
create_cluster() {
    if kind get clusters | grep -q "^${CLUSTER_NAME}$"; then
        log_warn "Cluster ${CLUSTER_NAME} already exists"
        read -p "Do you want to delete and recreate it? (y/N): " -n 1 -r
        echo
        if [[ $REPLY =~ ^[Yy]$ ]]; then
            log_info "Deleting existing cluster..."
            kind delete cluster --name "${CLUSTER_NAME}"
        else
            # Keep the existing cluster and just point kubectl at it.
            log_info "Using existing cluster"
            kubectl config use-context "kind-${CLUSTER_NAME}"
            return 0
        fi
    fi

    # Cluster config: mirror localhost:${REGISTRY_PORT} to the registry
    # container, and map common service ports to the host for ingress.
    log_info "Creating KIND cluster ${CLUSTER_NAME}..."
    cat <<EOF | kind create cluster --name "${CLUSTER_NAME}" --config=-
kind: Cluster
apiVersion: kind.x-k8s.io/v1alpha4
containerdConfigPatches:
- |-
  [plugins."io.containerd.grpc.v1.cri".registry.mirrors."localhost:${REGISTRY_PORT}"]
    endpoint = ["http://${REGISTRY_NAME}:5000"]
nodes:
- role: control-plane
  kubeadmConfigPatches:
  - |
    kind: InitConfiguration
    nodeRegistration:
      kubeletExtraArgs:
        node-labels: "ingress-ready=true"
  extraPortMappings:
  - containerPort: 80
    hostPort: 80
    protocol: TCP
  - containerPort: 443
    hostPort: 443
    protocol: TCP
  - containerPort: 8080
    hostPort: 8080
    protocol: TCP
  - containerPort: 3000
    hostPort: 3000
    protocol: TCP
EOF

    # Connect the registry to the cluster network
    # (only if it is not already attached to the "kind" docker network).
    if [ "$(docker inspect -f='{{json .NetworkSettings.Networks.kind}}' "${REGISTRY_NAME}")" = 'null' ]; then
        log_info "Connecting registry to cluster network..."
        docker network connect "kind" "${REGISTRY_NAME}"
    fi

    # Document the local registry
    # (standard ConfigMap per the KIND local-registry convention).
    kubectl apply -f - <<EOF
apiVersion: v1
kind: ConfigMap
metadata:
  name: local-registry-hosting
  namespace: kube-public
data:
  localRegistryHosting.v1: |
    host: "localhost:${REGISTRY_PORT}"
    help: "https://kind.sigs.k8s.io/docs/user/local-registry/"
EOF

    log_info "KIND cluster created successfully"
}
|
||||
|
||||
# Build Docker images
# Build the backend, validator, and frontend images and push each one
# to the local KIND registry under the :local tag.
build_images() {
    log_info "Building Docker images..."

    # Backend: binary built via make, image built from the repo root.
    log_info "Building backend..."
    make build-backend
    docker build -t localhost:${REGISTRY_PORT}/maptest-api:local .
    docker push localhost:${REGISTRY_PORT}/maptest-api:local

    # Validator: Containerfile lives under validation/, context is the repo root.
    log_info "Building validator..."
    make build-validator
    docker build -f validation/Containerfile -t localhost:${REGISTRY_PORT}/maptest-validator:local .
    docker push localhost:${REGISTRY_PORT}/maptest-validator:local

    # Frontend: the previous command passed two build contexts ("web" and "."),
    # which `docker build` rejects ("requires exactly 1 argument"). Use web/ as
    # the single context. NOTE(review): if web/Containerfile COPYs paths
    # relative to the repo root, the context should be "." instead — confirm
    # against the Containerfile.
    log_info "Building frontend..."
    docker build -f web/Containerfile -t localhost:${REGISTRY_PORT}/maptest-frontend:local web
    docker push localhost:${REGISTRY_PORT}/maptest-frontend:local

    log_info "All images built and pushed to local registry"
}
|
||||
|
||||
# Create secrets
# Populate the Kubernetes secrets the services expect. All values are
# dummies suitable only for local development.
create_secrets() {
    log_info "Creating Kubernetes secrets..."

    # Create dummy secrets for local development
    # (--dry-run piped into apply makes each create idempotent on re-runs).
    kubectl create secret generic cockroach-qtdb \
        --from-literal=HOST=data-postgres \
        --from-literal=PORT=5432 \
        --from-literal=USER=postgres \
        --from-literal=PASS=localpassword \
        --dry-run=client -o yaml | kubectl apply -f -

    kubectl create secret generic maptest-cookie \
        --from-literal=api=dummy-api-key \
        --dry-run=client -o yaml | kubectl apply -f -

    kubectl create secret generic auth-service-secrets \
        --from-literal=DISCORD_CLIENT_ID=dummy \
        --from-literal=DISCORD_CLIENT_SECRET=dummy \
        --from-literal=RBX_API_KEY=dummy \
        --dry-run=client -o yaml | kubectl apply -f -

    log_info "Secrets created"
}
|
||||
|
||||
# Deploy dependencies
# Deploys the backing services (PostgreSQL, Redis, NATS) as plain
# Deployments/Services via inline manifests, optionally applies the
# auth-service and data-service kustomizations from the infra checkout,
# and then waits for the pods to become ready. Failures of optional
# components are downgraded to warnings so local setup can proceed.
# NOTE(review): postgres uses emptyDir storage, so data does not survive
# pod restarts — acceptable for a throwaway local cluster.
deploy_dependencies() {
    log_info "Deploying dependencies..."

    # Deploy PostgreSQL (manual deployment)
    log_info "Deploying PostgreSQL..."
    kubectl apply -f - <<EOF
apiVersion: v1
kind: Service
metadata:
  name: data-postgres
spec:
  ports:
  - port: 5432
    targetPort: 5432
  selector:
    app: data-postgres
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: data-postgres
spec:
  replicas: 1
  selector:
    matchLabels:
      app: data-postgres
  template:
    metadata:
      labels:
        app: data-postgres
    spec:
      containers:
      - name: postgres
        image: postgres:15
        ports:
        - containerPort: 5432
        env:
        - name: POSTGRES_USER
          value: postgres
        - name: POSTGRES_PASSWORD
          value: localpassword
        - name: POSTGRES_DB
          value: postgres
        volumeMounts:
        - name: postgres-storage
          mountPath: /var/lib/postgresql/data
      volumes:
      - name: postgres-storage
        emptyDir: {}
EOF

    # Deploy Redis (using a simple deployment)
    log_info "Deploying Redis..."
    kubectl apply -f - <<EOF
apiVersion: v1
kind: Service
metadata:
  name: redis-master
spec:
  ports:
  - port: 6379
    targetPort: 6379
  selector:
    app: redis
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: redis
spec:
  replicas: 1
  selector:
    matchLabels:
      app: redis
  template:
    metadata:
      labels:
        app: redis
    spec:
      containers:
      - name: redis
        image: redis:latest
        ports:
        - containerPort: 6379
        command: ["redis-server", "--appendonly", "yes"]
EOF

    # Deploy NATS with JetStream enabled (-js).
    log_info "Deploying NATS..."
    kubectl apply -f - <<EOF
apiVersion: v1
kind: Service
metadata:
  name: nats
spec:
  ports:
  - port: 4222
    targetPort: 4222
  selector:
    app: nats
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: nats
spec:
  replicas: 1
  selector:
    matchLabels:
      app: nats
  template:
    metadata:
      labels:
        app: nats
    spec:
      containers:
      - name: nats
        image: nats:latest
        args: ["-js"]
        ports:
        - containerPort: 4222
EOF

    # Deploy Auth Service (if needed)
    if [ -d "${INFRA_PATH}/applications/auth-service/base" ]; then
        log_info "Deploying auth-service..."
        kubectl apply -k "${INFRA_PATH}/applications/auth-service/base" || log_warn "Auth service deployment failed, continuing..."
    fi

    # Deploy Data Service (if needed)
    if [ -d "${INFRA_PATH}/applications/data-service/base" ]; then
        log_info "Deploying data-service..."
        kubectl apply -k "${INFRA_PATH}/applications/data-service/base" || log_warn "Data service deployment failed, continuing..."
    fi

    # Readiness gates are best-effort: a timeout is reported but does not
    # abort the setup. Fix: redis was deployed above but never waited on,
    # unlike postgres and nats — gate it the same way.
    log_info "Waiting for dependencies to be ready..."
    kubectl wait --for=condition=ready pod -l app=data-postgres --timeout=120s || log_warn "PostgreSQL not ready yet"
    kubectl wait --for=condition=ready pod -l app=redis --timeout=60s || log_warn "Redis not ready yet"
    kubectl wait --for=condition=ready pod -l app=nats --timeout=60s || log_warn "NATS not ready yet"
}
|
||||
|
||||
# Deploy maps-service
# Copies the maps-services kustomize base from the infra checkout into a
# temp directory, overwrites its kustomization.yaml with a local-dev
# variant (images redirected to the local registry, imagePullSecrets
# stripped), applies it, and waits for the three workloads to come up.
deploy_maps_service() {
    log_info "Deploying maps-service..."

    # Create a local overlay for development.
    # Fix (SC2155): declare and assign separately so a mktemp failure is
    # not masked by `local`'s own exit status.
    local temp_dir
    temp_dir=$(mktemp -d)
    # temp_dir is expanded now (double quotes) because the local variable
    # will be out of scope when the EXIT trap eventually fires.
    trap "rm -rf ${temp_dir}" EXIT

    cp -r "${INFRA_PATH}/applications/maps-services/base" "${temp_dir}/"

    # Create a custom kustomization for local development
    cat > "${temp_dir}/base/kustomization.yaml" <<EOF
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization

commonLabels:
  service: maps-service

resources:
  - api.yaml
  - configmap.yaml
  - frontend.yaml
  - validator.yaml

images:
  - name: registry.itzana.me/strafesnet/maptest-api
    newName: localhost:${REGISTRY_PORT}/maptest-api
    newTag: local
  - name: registry.itzana.me/strafesnet/maptest-frontend
    newName: localhost:${REGISTRY_PORT}/maptest-frontend
    newTag: local
  - name: registry.itzana.me/strafesnet/maptest-validator
    newName: localhost:${REGISTRY_PORT}/maptest-validator
    newTag: local

patches:
  - target:
      kind: Deployment
    patch: |-
      - op: remove
        path: /spec/template/spec/imagePullSecrets
EOF

    kubectl apply -k "${temp_dir}/base" || {
        log_error "Failed to deploy maps-service"
        return 1
    }

    # Best-effort readiness gates; timeouts warn instead of aborting.
    log_info "Waiting for maps-service to be ready..."
    kubectl wait --for=condition=ready pod -l app=maptest-api --timeout=120s || log_warn "API not ready yet"
    kubectl wait --for=condition=ready pod -l app=maptest-frontend --timeout=120s || log_warn "Frontend not ready yet"
    kubectl wait --for=condition=ready pod -l app=maptest-validator --timeout=120s || log_warn "Validator not ready yet"
}
|
||||
|
||||
# Port forwarding
# Starts background kubectl port-forwards for the API (8080) and frontend
# (3000) services. The child processes are intentionally left untracked;
# the closing message tells the user to manage them manually or run them
# in dedicated terminals instead.
setup_port_forwarding() {
    log_info "Setting up port forwarding..."

    log_info "Port forwarding for API (8080)..."
    kubectl port-forward svc/maptest-api 8080:8080 &

    log_info "Port forwarding for Frontend (3000)..."
    kubectl port-forward svc/maptest-frontend 3000:3000 &

    log_info "Port forwarding setup complete"
    log_info "You may need to manually manage these port-forwards or run them in separate terminals"
}
|
||||
|
||||
# Display cluster info
# Prints a post-setup summary: cluster/registry identifiers, the current
# services and pods, the local access URLs, and a short cheat sheet of
# follow-up commands. Purely informational — performs no mutations.
display_info() {
    log_info "======================================"
    log_info "KIND Cluster Setup Complete!"
    log_info "======================================"
    echo
    log_info "Cluster name: ${CLUSTER_NAME}"
    log_info "Local registry: localhost:${REGISTRY_PORT}"
    echo
    log_info "Services:"
    kubectl get svc
    echo
    log_info "Pods:"
    kubectl get pods
    echo
    log_info "Access your application:"
    log_info "  - Frontend: http://localhost:3000"
    log_info "  - API: http://localhost:8080"
    echo
    log_info "Useful commands:"
    log_info "  - View logs: kubectl logs -f <pod-name>"
    log_info "  - Get pods: kubectl get pods"
    log_info "  - Delete cluster: kind delete cluster --name ${CLUSTER_NAME}"
    log_info "  - Rebuild and redeploy: ./kind-setup.sh --rebuild"
}
|
||||
|
||||
# Cleanup function
# Tears down the kind cluster and the local registry container.
# Fix: the original `docker stop X && docker rm X` skipped the `rm` when
# the container existed but was already stopped, and errored out entirely
# when the registry had never been created. Both steps are now quoted,
# independent, and best-effort so cleanup always completes.
cleanup() {
    log_info "Cleaning up..."
    kind delete cluster --name "${CLUSTER_NAME}" || log_warn "Cluster ${CLUSTER_NAME} not found"
    docker stop "${REGISTRY_NAME}" >/dev/null 2>&1 || true
    docker rm "${REGISTRY_NAME}" >/dev/null 2>&1 || true
    log_info "Cleanup complete"
}
|
||||
|
||||
# Main function
# Parses CLI flags, validates the infra checkout, then drives the full
# bring-up: dependency check -> registry -> cluster -> (optional) image
# build -> secrets -> backing services -> maps-service -> summary.
# NOTE(review): setup_port_forwarding is defined but never invoked here;
# the closing messages instead tell the user to port-forward manually.
main() {
    local do_rebuild=false
    local do_cleanup=false

    # Parse arguments
    while [[ $# -gt 0 ]]; do
        case $1 in
            --rebuild)
                do_rebuild=true
                shift
                ;;
            --cleanup)
                do_cleanup=true
                shift
                ;;
            --infra-path)
                INFRA_PATH="$2"
                shift 2
                ;;
            --help)
                echo "Usage: $0 [OPTIONS]"
                echo "Options:"
                echo "  --rebuild          Rebuild and push Docker images"
                echo "  --cleanup          Delete the cluster and registry"
                echo "  --infra-path PATH  Path to infra directory (default: ~/Documents/Projects/infra)"
                echo "  --help             Show this help message"
                exit 0
                ;;
            *)
                log_error "Unknown option: $1"
                exit 1
                ;;
        esac
    done

    # --cleanup short-circuits everything else.
    if [ "$do_cleanup" = true ]; then
        cleanup
        exit 0
    fi

    # Validate infra path before touching any infrastructure.
    if [ ! -d "$INFRA_PATH" ]; then
        log_error "Infra path does not exist: $INFRA_PATH"
        log_error "Please provide a valid path using --infra-path"
        exit 1
    fi

    if [ ! -d "$INFRA_PATH/applications/maps-services" ]; then
        log_error "maps-services not found in infra path: $INFRA_PATH/applications/maps-services"
        exit 1
    fi

    log_info "Using infra path: $INFRA_PATH"

    check_dependencies
    create_registry
    create_cluster

    # Images are only rebuilt on request; otherwise previously pushed
    # :local tags in the registry are reused.
    if [ "$do_rebuild" = true ]; then
        build_images
    fi

    create_secrets
    deploy_dependencies
    deploy_maps_service
    display_info

    log_info "Setup complete! Press Ctrl+C to stop port forwarding and exit."
    log_warn "Note: You may want to set up port-forwarding manually in separate terminals:"
    log_info "  kubectl port-forward svc/maptest-api 8080:8080"
    log_info "  kubectl port-forward svc/maptest-frontend 3000:3000"
}
|
||||
|
||||
# Entry point: forward all CLI arguments to main.
main "$@"
|
||||
331
openapi.yaml
331
openapi.yaml
@@ -6,6 +6,8 @@ info:
|
||||
servers:
|
||||
- url: https://submissions.strafes.net/v1
|
||||
tags:
|
||||
- name: AOR
|
||||
description: AOR (Accept or Reject) event operations
|
||||
- name: Mapfixes
|
||||
description: Mapfix operations
|
||||
- name: Maps
|
||||
@@ -184,29 +186,6 @@ paths:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/maps/{MapID}/combobulate:
|
||||
post:
|
||||
summary: Queue a map for combobulator processing
|
||||
operationId: combobulate
|
||||
tags:
|
||||
- Maps
|
||||
parameters:
|
||||
- name: MapID
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
responses:
|
||||
"204":
|
||||
description: Successful response
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/maps/{MapID}/download:
|
||||
get:
|
||||
summary: Download the map asset
|
||||
@@ -235,21 +214,6 @@ paths:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/maps-admin/seed-combobulator:
|
||||
post:
|
||||
summary: Queue all maps for combobulator processing
|
||||
operationId: seedCombobulator
|
||||
tags:
|
||||
- Maps
|
||||
responses:
|
||||
"204":
|
||||
description: Successful response
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/mapfixes:
|
||||
get:
|
||||
summary: Get list of mapfixes
|
||||
@@ -998,6 +962,89 @@ paths:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/submissions/{SubmissionID}/reviews:
|
||||
get:
|
||||
summary: Get all reviews for a submission
|
||||
operationId: listSubmissionReviews
|
||||
tags:
|
||||
- Submissions
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/SubmissionID'
|
||||
responses:
|
||||
"200":
|
||||
description: Successful response
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/SubmissionReview"
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
post:
|
||||
summary: Create a review for a submission
|
||||
operationId: createSubmissionReview
|
||||
tags:
|
||||
- Submissions
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/SubmissionID'
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/SubmissionReviewCreate"
|
||||
responses:
|
||||
"200":
|
||||
description: Successful response
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/SubmissionReview"
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/submissions/{SubmissionID}/reviews/{ReviewID}:
|
||||
patch:
|
||||
summary: Update an existing review
|
||||
operationId: updateSubmissionReview
|
||||
tags:
|
||||
- Submissions
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/SubmissionID'
|
||||
- name: ReviewID
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/SubmissionReviewCreate"
|
||||
responses:
|
||||
"200":
|
||||
description: Successful response
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/SubmissionReview"
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/submissions/{SubmissionID}/model:
|
||||
post:
|
||||
summary: Update model following role restrictions
|
||||
@@ -1262,6 +1309,109 @@ paths:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/aor-events:
|
||||
get:
|
||||
summary: Get list of AOR events
|
||||
operationId: listAOREvents
|
||||
tags:
|
||||
- AOR
|
||||
security: []
|
||||
parameters:
|
||||
- $ref: "#/components/parameters/Page"
|
||||
- $ref: "#/components/parameters/Limit"
|
||||
responses:
|
||||
"200":
|
||||
description: Successful response
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/AOREvent"
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/aor-events/active:
|
||||
get:
|
||||
summary: Get the currently active AOR event
|
||||
operationId: getActiveAOREvent
|
||||
tags:
|
||||
- AOR
|
||||
security: []
|
||||
responses:
|
||||
"200":
|
||||
description: Successful response
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/AOREvent"
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/aor-events/{AOREventID}:
|
||||
get:
|
||||
summary: Get a specific AOR event
|
||||
operationId: getAOREvent
|
||||
tags:
|
||||
- AOR
|
||||
security: []
|
||||
parameters:
|
||||
- name: AOREventID
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 1
|
||||
responses:
|
||||
"200":
|
||||
description: Successful response
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/AOREvent"
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/aor-events/{AOREventID}/submissions:
|
||||
get:
|
||||
summary: Get all submissions for a specific AOR event
|
||||
operationId: getAOREventSubmissions
|
||||
tags:
|
||||
- AOR
|
||||
security: []
|
||||
parameters:
|
||||
- name: AOREventID
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 1
|
||||
responses:
|
||||
"200":
|
||||
description: Successful response
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/Submission"
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/script-policy:
|
||||
get:
|
||||
summary: Get list of script policies
|
||||
@@ -1821,6 +1971,56 @@ components:
|
||||
minimum: 0
|
||||
maximum: 100
|
||||
schemas:
|
||||
AOREvent:
|
||||
type: object
|
||||
required:
|
||||
- ID
|
||||
- StartDate
|
||||
- FreezeDate
|
||||
- SelectionDate
|
||||
- DecisionDate
|
||||
- Status
|
||||
- CreatedAt
|
||||
- UpdatedAt
|
||||
properties:
|
||||
ID:
|
||||
type: integer
|
||||
format: int64
|
||||
StartDate:
|
||||
type: integer
|
||||
format: int64
|
||||
description: Unix timestamp for the 1st day of AOR month
|
||||
FreezeDate:
|
||||
type: integer
|
||||
format: int64
|
||||
description: Unix timestamp when submissions are frozen
|
||||
SelectionDate:
|
||||
type: integer
|
||||
format: int64
|
||||
description: Unix timestamp when automatic selection occurs (end of week 1)
|
||||
DecisionDate:
|
||||
type: integer
|
||||
format: int64
|
||||
description: Unix timestamp when final accept/reject decisions are made (end of month)
|
||||
Status:
|
||||
type: integer
|
||||
format: int32
|
||||
minimum: 0
|
||||
maximum: 5
|
||||
description: >
|
||||
AOR Event Status:
|
||||
* `0` - Scheduled
|
||||
* `1` - Open
|
||||
* `2` - Frozen
|
||||
* `3` - Selected
|
||||
* `4` - Completed
|
||||
* `5` - Closed
|
||||
CreatedAt:
|
||||
type: integer
|
||||
format: int64
|
||||
UpdatedAt:
|
||||
type: integer
|
||||
format: int64
|
||||
AuditEvent:
|
||||
type: object
|
||||
required:
|
||||
@@ -2406,6 +2606,61 @@ components:
|
||||
- ReleasedMapfixes
|
||||
- SubmittedSubmissions
|
||||
- SubmittedMapfixes
|
||||
SubmissionReview:
|
||||
required:
|
||||
- ID
|
||||
- SubmissionID
|
||||
- ReviewerID
|
||||
- Recommend
|
||||
- Description
|
||||
- Outdated
|
||||
- CreatedAt
|
||||
- UpdatedAt
|
||||
type: object
|
||||
properties:
|
||||
ID:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
SubmissionID:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
ReviewerID:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
Recommend:
|
||||
type: boolean
|
||||
description: Whether the reviewer recommends accepting the submission
|
||||
Description:
|
||||
type: string
|
||||
maxLength: 2048
|
||||
description: Text description of the review reasoning
|
||||
Outdated:
|
||||
type: boolean
|
||||
description: Flag indicating if the review is outdated due to submission changes
|
||||
CreatedAt:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
UpdatedAt:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
SubmissionReviewCreate:
|
||||
required:
|
||||
- Recommend
|
||||
- Description
|
||||
type: object
|
||||
properties:
|
||||
Recommend:
|
||||
type: boolean
|
||||
description: Whether the reviewer recommends accepting the submission
|
||||
Description:
|
||||
type: string
|
||||
maxLength: 2048
|
||||
description: Text description of the review reasoning
|
||||
Error:
|
||||
description: Represents error object
|
||||
type: object
|
||||
|
||||
@@ -4,7 +4,6 @@ package api
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
ht "github.com/ogen-go/ogen/http"
|
||||
"github.com/ogen-go/ogen/middleware"
|
||||
@@ -83,8 +82,18 @@ func (o otelOptionFunc) applyServer(c *serverConfig) {
|
||||
|
||||
func newServerConfig(opts ...ServerOption) serverConfig {
|
||||
cfg := serverConfig{
|
||||
NotFound: http.NotFound,
|
||||
MethodNotAllowed: nil,
|
||||
NotFound: http.NotFound,
|
||||
MethodNotAllowed: func(w http.ResponseWriter, r *http.Request, allowed string) {
|
||||
status := http.StatusMethodNotAllowed
|
||||
if r.Method == "OPTIONS" {
|
||||
w.Header().Set("Access-Control-Allow-Methods", allowed)
|
||||
w.Header().Set("Access-Control-Allow-Headers", "Content-Type")
|
||||
status = http.StatusNoContent
|
||||
} else {
|
||||
w.Header().Set("Allow", allowed)
|
||||
}
|
||||
w.WriteHeader(status)
|
||||
},
|
||||
ErrorHandler: ogenerrors.DefaultErrorHandler,
|
||||
Middleware: nil,
|
||||
MaxMultipartMemory: 32 << 20, // 32 MB
|
||||
@@ -107,44 +116,8 @@ func (s baseServer) notFound(w http.ResponseWriter, r *http.Request) {
|
||||
s.cfg.NotFound(w, r)
|
||||
}
|
||||
|
||||
type notAllowedParams struct {
|
||||
allowedMethods string
|
||||
allowedHeaders map[string]string
|
||||
acceptPost string
|
||||
acceptPatch string
|
||||
}
|
||||
|
||||
func (s baseServer) notAllowed(w http.ResponseWriter, r *http.Request, params notAllowedParams) {
|
||||
h := w.Header()
|
||||
isOptions := r.Method == "OPTIONS"
|
||||
if isOptions {
|
||||
h.Set("Access-Control-Allow-Methods", params.allowedMethods)
|
||||
if params.allowedHeaders != nil {
|
||||
m := r.Header.Get("Access-Control-Request-Method")
|
||||
if m != "" {
|
||||
allowedHeaders, ok := params.allowedHeaders[strings.ToUpper(m)]
|
||||
if ok {
|
||||
h.Set("Access-Control-Allow-Headers", allowedHeaders)
|
||||
}
|
||||
}
|
||||
}
|
||||
if params.acceptPost != "" {
|
||||
h.Set("Accept-Post", params.acceptPost)
|
||||
}
|
||||
if params.acceptPatch != "" {
|
||||
h.Set("Accept-Patch", params.acceptPatch)
|
||||
}
|
||||
}
|
||||
if s.cfg.MethodNotAllowed != nil {
|
||||
s.cfg.MethodNotAllowed(w, r, params.allowedMethods)
|
||||
return
|
||||
}
|
||||
status := http.StatusNoContent
|
||||
if !isOptions {
|
||||
h.Set("Allow", params.allowedMethods)
|
||||
status = http.StatusMethodNotAllowed
|
||||
}
|
||||
w.WriteHeader(status)
|
||||
func (s baseServer) notAllowed(w http.ResponseWriter, r *http.Request, allowed string) {
|
||||
s.cfg.MethodNotAllowed(w, r, allowed)
|
||||
}
|
||||
|
||||
func (cfg serverConfig) baseServer() (s baseServer, err error) {
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -12,6 +12,221 @@ import (
|
||||
"github.com/ogen-go/ogen/validate"
|
||||
)
|
||||
|
||||
// Encode implements json.Marshaler.
|
||||
func (s *AOREvent) Encode(e *jx.Encoder) {
|
||||
e.ObjStart()
|
||||
s.encodeFields(e)
|
||||
e.ObjEnd()
|
||||
}
|
||||
|
||||
// encodeFields encodes fields.
|
||||
func (s *AOREvent) encodeFields(e *jx.Encoder) {
|
||||
{
|
||||
e.FieldStart("ID")
|
||||
e.Int64(s.ID)
|
||||
}
|
||||
{
|
||||
e.FieldStart("StartDate")
|
||||
e.Int64(s.StartDate)
|
||||
}
|
||||
{
|
||||
e.FieldStart("FreezeDate")
|
||||
e.Int64(s.FreezeDate)
|
||||
}
|
||||
{
|
||||
e.FieldStart("SelectionDate")
|
||||
e.Int64(s.SelectionDate)
|
||||
}
|
||||
{
|
||||
e.FieldStart("DecisionDate")
|
||||
e.Int64(s.DecisionDate)
|
||||
}
|
||||
{
|
||||
e.FieldStart("Status")
|
||||
e.Int32(s.Status)
|
||||
}
|
||||
{
|
||||
e.FieldStart("CreatedAt")
|
||||
e.Int64(s.CreatedAt)
|
||||
}
|
||||
{
|
||||
e.FieldStart("UpdatedAt")
|
||||
e.Int64(s.UpdatedAt)
|
||||
}
|
||||
}
|
||||
|
||||
// jsonFieldsNameOfAOREvent maps required-field bit positions (as used by
// Decode's requiredBitSet) back to their JSON key names for error reporting.
var jsonFieldsNameOfAOREvent = [8]string{
	"ID",
	"StartDate",
	"FreezeDate",
	"SelectionDate",
	"DecisionDate",
	"Status",
	"CreatedAt",
	"UpdatedAt",
}
|
||||
|
||||
// Decode decodes AOREvent from json.
|
||||
func (s *AOREvent) Decode(d *jx.Decoder) error {
|
||||
if s == nil {
|
||||
return errors.New("invalid: unable to decode AOREvent to nil")
|
||||
}
|
||||
var requiredBitSet [1]uint8
|
||||
|
||||
if err := d.ObjBytes(func(d *jx.Decoder, k []byte) error {
|
||||
switch string(k) {
|
||||
case "ID":
|
||||
requiredBitSet[0] |= 1 << 0
|
||||
if err := func() error {
|
||||
v, err := d.Int64()
|
||||
s.ID = int64(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return errors.Wrap(err, "decode field \"ID\"")
|
||||
}
|
||||
case "StartDate":
|
||||
requiredBitSet[0] |= 1 << 1
|
||||
if err := func() error {
|
||||
v, err := d.Int64()
|
||||
s.StartDate = int64(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return errors.Wrap(err, "decode field \"StartDate\"")
|
||||
}
|
||||
case "FreezeDate":
|
||||
requiredBitSet[0] |= 1 << 2
|
||||
if err := func() error {
|
||||
v, err := d.Int64()
|
||||
s.FreezeDate = int64(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return errors.Wrap(err, "decode field \"FreezeDate\"")
|
||||
}
|
||||
case "SelectionDate":
|
||||
requiredBitSet[0] |= 1 << 3
|
||||
if err := func() error {
|
||||
v, err := d.Int64()
|
||||
s.SelectionDate = int64(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return errors.Wrap(err, "decode field \"SelectionDate\"")
|
||||
}
|
||||
case "DecisionDate":
|
||||
requiredBitSet[0] |= 1 << 4
|
||||
if err := func() error {
|
||||
v, err := d.Int64()
|
||||
s.DecisionDate = int64(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return errors.Wrap(err, "decode field \"DecisionDate\"")
|
||||
}
|
||||
case "Status":
|
||||
requiredBitSet[0] |= 1 << 5
|
||||
if err := func() error {
|
||||
v, err := d.Int32()
|
||||
s.Status = int32(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return errors.Wrap(err, "decode field \"Status\"")
|
||||
}
|
||||
case "CreatedAt":
|
||||
requiredBitSet[0] |= 1 << 6
|
||||
if err := func() error {
|
||||
v, err := d.Int64()
|
||||
s.CreatedAt = int64(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return errors.Wrap(err, "decode field \"CreatedAt\"")
|
||||
}
|
||||
case "UpdatedAt":
|
||||
requiredBitSet[0] |= 1 << 7
|
||||
if err := func() error {
|
||||
v, err := d.Int64()
|
||||
s.UpdatedAt = int64(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return errors.Wrap(err, "decode field \"UpdatedAt\"")
|
||||
}
|
||||
default:
|
||||
return d.Skip()
|
||||
}
|
||||
return nil
|
||||
}); err != nil {
|
||||
return errors.Wrap(err, "decode AOREvent")
|
||||
}
|
||||
// Validate required fields.
|
||||
var failures []validate.FieldError
|
||||
for i, mask := range [1]uint8{
|
||||
0b11111111,
|
||||
} {
|
||||
if result := (requiredBitSet[i] & mask) ^ mask; result != 0 {
|
||||
// Mask only required fields and check equality to mask using XOR.
|
||||
//
|
||||
// If XOR result is not zero, result is not equal to expected, so some fields are missed.
|
||||
// Bits of fields which would be set are actually bits of missed fields.
|
||||
missed := bits.OnesCount8(result)
|
||||
for bitN := 0; bitN < missed; bitN++ {
|
||||
bitIdx := bits.TrailingZeros8(result)
|
||||
fieldIdx := i*8 + bitIdx
|
||||
var name string
|
||||
if fieldIdx < len(jsonFieldsNameOfAOREvent) {
|
||||
name = jsonFieldsNameOfAOREvent[fieldIdx]
|
||||
} else {
|
||||
name = strconv.Itoa(fieldIdx)
|
||||
}
|
||||
failures = append(failures, validate.FieldError{
|
||||
Name: name,
|
||||
Error: validate.ErrFieldRequired,
|
||||
})
|
||||
// Reset bit.
|
||||
result &^= 1 << bitIdx
|
||||
}
|
||||
}
|
||||
}
|
||||
if len(failures) > 0 {
|
||||
return &validate.Error{Fields: failures}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// MarshalJSON implements stdjson.Marshaler.
|
||||
func (s *AOREvent) MarshalJSON() ([]byte, error) {
|
||||
e := jx.Encoder{}
|
||||
s.Encode(&e)
|
||||
return e.Bytes(), nil
|
||||
}
|
||||
|
||||
// UnmarshalJSON implements stdjson.Unmarshaler.
|
||||
func (s *AOREvent) UnmarshalJSON(data []byte) error {
|
||||
d := jx.DecodeBytes(data)
|
||||
return s.Decode(d)
|
||||
}
|
||||
|
||||
// Encode implements json.Marshaler.
|
||||
func (s *AuditEvent) Encode(e *jx.Encoder) {
|
||||
e.ObjStart()
|
||||
@@ -4435,6 +4650,334 @@ func (s *Submission) UnmarshalJSON(data []byte) error {
|
||||
return s.Decode(d)
|
||||
}
|
||||
|
||||
// Encode implements json.Marshaler.
|
||||
func (s *SubmissionReview) Encode(e *jx.Encoder) {
|
||||
e.ObjStart()
|
||||
s.encodeFields(e)
|
||||
e.ObjEnd()
|
||||
}
|
||||
|
||||
// encodeFields encodes fields.
|
||||
func (s *SubmissionReview) encodeFields(e *jx.Encoder) {
|
||||
{
|
||||
e.FieldStart("ID")
|
||||
e.Int64(s.ID)
|
||||
}
|
||||
{
|
||||
e.FieldStart("SubmissionID")
|
||||
e.Int64(s.SubmissionID)
|
||||
}
|
||||
{
|
||||
e.FieldStart("ReviewerID")
|
||||
e.Int64(s.ReviewerID)
|
||||
}
|
||||
{
|
||||
e.FieldStart("Recommend")
|
||||
e.Bool(s.Recommend)
|
||||
}
|
||||
{
|
||||
e.FieldStart("Description")
|
||||
e.Str(s.Description)
|
||||
}
|
||||
{
|
||||
e.FieldStart("Outdated")
|
||||
e.Bool(s.Outdated)
|
||||
}
|
||||
{
|
||||
e.FieldStart("CreatedAt")
|
||||
e.Int64(s.CreatedAt)
|
||||
}
|
||||
{
|
||||
e.FieldStart("UpdatedAt")
|
||||
e.Int64(s.UpdatedAt)
|
||||
}
|
||||
}
|
||||
|
||||
// jsonFieldsNameOfSubmissionReview maps required-field bit positions (as
// used by Decode's requiredBitSet) back to their JSON key names.
var jsonFieldsNameOfSubmissionReview = [8]string{
	"ID",
	"SubmissionID",
	"ReviewerID",
	"Recommend",
	"Description",
	"Outdated",
	"CreatedAt",
	"UpdatedAt",
}
|
||||
|
||||
// Decode decodes SubmissionReview from json.
|
||||
func (s *SubmissionReview) Decode(d *jx.Decoder) error {
|
||||
if s == nil {
|
||||
return errors.New("invalid: unable to decode SubmissionReview to nil")
|
||||
}
|
||||
var requiredBitSet [1]uint8
|
||||
|
||||
if err := d.ObjBytes(func(d *jx.Decoder, k []byte) error {
|
||||
switch string(k) {
|
||||
case "ID":
|
||||
requiredBitSet[0] |= 1 << 0
|
||||
if err := func() error {
|
||||
v, err := d.Int64()
|
||||
s.ID = int64(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return errors.Wrap(err, "decode field \"ID\"")
|
||||
}
|
||||
case "SubmissionID":
|
||||
requiredBitSet[0] |= 1 << 1
|
||||
if err := func() error {
|
||||
v, err := d.Int64()
|
||||
s.SubmissionID = int64(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return errors.Wrap(err, "decode field \"SubmissionID\"")
|
||||
}
|
||||
case "ReviewerID":
|
||||
requiredBitSet[0] |= 1 << 2
|
||||
if err := func() error {
|
||||
v, err := d.Int64()
|
||||
s.ReviewerID = int64(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return errors.Wrap(err, "decode field \"ReviewerID\"")
|
||||
}
|
||||
case "Recommend":
|
||||
requiredBitSet[0] |= 1 << 3
|
||||
if err := func() error {
|
||||
v, err := d.Bool()
|
||||
s.Recommend = bool(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return errors.Wrap(err, "decode field \"Recommend\"")
|
||||
}
|
||||
case "Description":
|
||||
requiredBitSet[0] |= 1 << 4
|
||||
if err := func() error {
|
||||
v, err := d.Str()
|
||||
s.Description = string(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return errors.Wrap(err, "decode field \"Description\"")
|
||||
}
|
||||
case "Outdated":
|
||||
requiredBitSet[0] |= 1 << 5
|
||||
if err := func() error {
|
||||
v, err := d.Bool()
|
||||
s.Outdated = bool(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return errors.Wrap(err, "decode field \"Outdated\"")
|
||||
}
|
||||
case "CreatedAt":
|
||||
requiredBitSet[0] |= 1 << 6
|
||||
if err := func() error {
|
||||
v, err := d.Int64()
|
||||
s.CreatedAt = int64(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return errors.Wrap(err, "decode field \"CreatedAt\"")
|
||||
}
|
||||
case "UpdatedAt":
|
||||
requiredBitSet[0] |= 1 << 7
|
||||
if err := func() error {
|
||||
v, err := d.Int64()
|
||||
s.UpdatedAt = int64(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return errors.Wrap(err, "decode field \"UpdatedAt\"")
|
||||
}
|
||||
default:
|
||||
return d.Skip()
|
||||
}
|
||||
return nil
|
||||
}); err != nil {
|
||||
return errors.Wrap(err, "decode SubmissionReview")
|
||||
}
|
||||
// Validate required fields.
|
||||
var failures []validate.FieldError
|
||||
for i, mask := range [1]uint8{
|
||||
0b11111111,
|
||||
} {
|
||||
if result := (requiredBitSet[i] & mask) ^ mask; result != 0 {
|
||||
// Mask only required fields and check equality to mask using XOR.
|
||||
//
|
||||
// If XOR result is not zero, result is not equal to expected, so some fields are missed.
|
||||
// Bits of fields which would be set are actually bits of missed fields.
|
||||
missed := bits.OnesCount8(result)
|
||||
for bitN := 0; bitN < missed; bitN++ {
|
||||
bitIdx := bits.TrailingZeros8(result)
|
||||
fieldIdx := i*8 + bitIdx
|
||||
var name string
|
||||
if fieldIdx < len(jsonFieldsNameOfSubmissionReview) {
|
||||
name = jsonFieldsNameOfSubmissionReview[fieldIdx]
|
||||
} else {
|
||||
name = strconv.Itoa(fieldIdx)
|
||||
}
|
||||
failures = append(failures, validate.FieldError{
|
||||
Name: name,
|
||||
Error: validate.ErrFieldRequired,
|
||||
})
|
||||
// Reset bit.
|
||||
result &^= 1 << bitIdx
|
||||
}
|
||||
}
|
||||
}
|
||||
if len(failures) > 0 {
|
||||
return &validate.Error{Fields: failures}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// MarshalJSON implements stdjson.Marshaler.
|
||||
func (s *SubmissionReview) MarshalJSON() ([]byte, error) {
|
||||
e := jx.Encoder{}
|
||||
s.Encode(&e)
|
||||
return e.Bytes(), nil
|
||||
}
|
||||
|
||||
// UnmarshalJSON implements stdjson.Unmarshaler.
|
||||
func (s *SubmissionReview) UnmarshalJSON(data []byte) error {
|
||||
d := jx.DecodeBytes(data)
|
||||
return s.Decode(d)
|
||||
}
|
||||
|
||||
// Encode implements json.Marshaler.
|
||||
func (s *SubmissionReviewCreate) Encode(e *jx.Encoder) {
|
||||
e.ObjStart()
|
||||
s.encodeFields(e)
|
||||
e.ObjEnd()
|
||||
}
|
||||
|
||||
// encodeFields encodes fields.
|
||||
func (s *SubmissionReviewCreate) encodeFields(e *jx.Encoder) {
|
||||
{
|
||||
e.FieldStart("Recommend")
|
||||
e.Bool(s.Recommend)
|
||||
}
|
||||
{
|
||||
e.FieldStart("Description")
|
||||
e.Str(s.Description)
|
||||
}
|
||||
}
|
||||
|
||||
var jsonFieldsNameOfSubmissionReviewCreate = [2]string{
|
||||
0: "Recommend",
|
||||
1: "Description",
|
||||
}
|
||||
|
||||
// Decode decodes SubmissionReviewCreate from json.
|
||||
func (s *SubmissionReviewCreate) Decode(d *jx.Decoder) error {
|
||||
if s == nil {
|
||||
return errors.New("invalid: unable to decode SubmissionReviewCreate to nil")
|
||||
}
|
||||
var requiredBitSet [1]uint8
|
||||
|
||||
if err := d.ObjBytes(func(d *jx.Decoder, k []byte) error {
|
||||
switch string(k) {
|
||||
case "Recommend":
|
||||
requiredBitSet[0] |= 1 << 0
|
||||
if err := func() error {
|
||||
v, err := d.Bool()
|
||||
s.Recommend = bool(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return errors.Wrap(err, "decode field \"Recommend\"")
|
||||
}
|
||||
case "Description":
|
||||
requiredBitSet[0] |= 1 << 1
|
||||
if err := func() error {
|
||||
v, err := d.Str()
|
||||
s.Description = string(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return errors.Wrap(err, "decode field \"Description\"")
|
||||
}
|
||||
default:
|
||||
return d.Skip()
|
||||
}
|
||||
return nil
|
||||
}); err != nil {
|
||||
return errors.Wrap(err, "decode SubmissionReviewCreate")
|
||||
}
|
||||
// Validate required fields.
|
||||
var failures []validate.FieldError
|
||||
for i, mask := range [1]uint8{
|
||||
0b00000011,
|
||||
} {
|
||||
if result := (requiredBitSet[i] & mask) ^ mask; result != 0 {
|
||||
// Mask only required fields and check equality to mask using XOR.
|
||||
//
|
||||
// If XOR result is not zero, result is not equal to expected, so some fields are missed.
|
||||
// Bits of fields which would be set are actually bits of missed fields.
|
||||
missed := bits.OnesCount8(result)
|
||||
for bitN := 0; bitN < missed; bitN++ {
|
||||
bitIdx := bits.TrailingZeros8(result)
|
||||
fieldIdx := i*8 + bitIdx
|
||||
var name string
|
||||
if fieldIdx < len(jsonFieldsNameOfSubmissionReviewCreate) {
|
||||
name = jsonFieldsNameOfSubmissionReviewCreate[fieldIdx]
|
||||
} else {
|
||||
name = strconv.Itoa(fieldIdx)
|
||||
}
|
||||
failures = append(failures, validate.FieldError{
|
||||
Name: name,
|
||||
Error: validate.ErrFieldRequired,
|
||||
})
|
||||
// Reset bit.
|
||||
result &^= 1 << bitIdx
|
||||
}
|
||||
}
|
||||
}
|
||||
if len(failures) > 0 {
|
||||
return &validate.Error{Fields: failures}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// MarshalJSON implements stdjson.Marshaler.
|
||||
func (s *SubmissionReviewCreate) MarshalJSON() ([]byte, error) {
|
||||
e := jx.Encoder{}
|
||||
s.Encode(&e)
|
||||
return e.Bytes(), nil
|
||||
}
|
||||
|
||||
// UnmarshalJSON implements stdjson.Unmarshaler.
|
||||
func (s *SubmissionReviewCreate) UnmarshalJSON(data []byte) error {
|
||||
d := jx.DecodeBytes(data)
|
||||
return s.Decode(d)
|
||||
}
|
||||
|
||||
// Encode implements json.Marshaler.
|
||||
func (s *SubmissionTriggerCreate) Encode(e *jx.Encoder) {
|
||||
e.ObjStart()
|
||||
|
||||
@@ -33,7 +33,6 @@ const (
|
||||
BatchAssetThumbnailsOperation OperationName = "BatchAssetThumbnails"
|
||||
BatchUserThumbnailsOperation OperationName = "BatchUserThumbnails"
|
||||
BatchUsernamesOperation OperationName = "BatchUsernames"
|
||||
CombobulateOperation OperationName = "Combobulate"
|
||||
CreateMapfixOperation OperationName = "CreateMapfix"
|
||||
CreateMapfixAuditCommentOperation OperationName = "CreateMapfixAuditComment"
|
||||
CreateScriptOperation OperationName = "CreateScript"
|
||||
@@ -41,9 +40,13 @@ const (
|
||||
CreateSubmissionOperation OperationName = "CreateSubmission"
|
||||
CreateSubmissionAdminOperation OperationName = "CreateSubmissionAdmin"
|
||||
CreateSubmissionAuditCommentOperation OperationName = "CreateSubmissionAuditComment"
|
||||
CreateSubmissionReviewOperation OperationName = "CreateSubmissionReview"
|
||||
DeleteScriptOperation OperationName = "DeleteScript"
|
||||
DeleteScriptPolicyOperation OperationName = "DeleteScriptPolicy"
|
||||
DownloadMapAssetOperation OperationName = "DownloadMapAsset"
|
||||
GetAOREventOperation OperationName = "GetAOREvent"
|
||||
GetAOREventSubmissionsOperation OperationName = "GetAOREventSubmissions"
|
||||
GetActiveAOREventOperation OperationName = "GetActiveAOREvent"
|
||||
GetAssetThumbnailOperation OperationName = "GetAssetThumbnail"
|
||||
GetMapOperation OperationName = "GetMap"
|
||||
GetMapfixOperation OperationName = "GetMapfix"
|
||||
@@ -53,15 +56,16 @@ const (
|
||||
GetStatsOperation OperationName = "GetStats"
|
||||
GetSubmissionOperation OperationName = "GetSubmission"
|
||||
GetUserThumbnailOperation OperationName = "GetUserThumbnail"
|
||||
ListAOREventsOperation OperationName = "ListAOREvents"
|
||||
ListMapfixAuditEventsOperation OperationName = "ListMapfixAuditEvents"
|
||||
ListMapfixesOperation OperationName = "ListMapfixes"
|
||||
ListMapsOperation OperationName = "ListMaps"
|
||||
ListScriptPolicyOperation OperationName = "ListScriptPolicy"
|
||||
ListScriptsOperation OperationName = "ListScripts"
|
||||
ListSubmissionAuditEventsOperation OperationName = "ListSubmissionAuditEvents"
|
||||
ListSubmissionReviewsOperation OperationName = "ListSubmissionReviews"
|
||||
ListSubmissionsOperation OperationName = "ListSubmissions"
|
||||
ReleaseSubmissionsOperation OperationName = "ReleaseSubmissions"
|
||||
SeedCombobulatorOperation OperationName = "SeedCombobulator"
|
||||
SessionRolesOperation OperationName = "SessionRoles"
|
||||
SessionUserOperation OperationName = "SessionUser"
|
||||
SessionValidateOperation OperationName = "SessionValidate"
|
||||
@@ -72,4 +76,5 @@ const (
|
||||
UpdateScriptOperation OperationName = "UpdateScript"
|
||||
UpdateScriptPolicyOperation OperationName = "UpdateScriptPolicy"
|
||||
UpdateSubmissionModelOperation OperationName = "UpdateSubmissionModel"
|
||||
UpdateSubmissionReviewOperation OperationName = "UpdateSubmissionReview"
|
||||
)
|
||||
|
||||
@@ -2030,89 +2030,6 @@ func decodeActionSubmissionValidatedParams(args [1]string, argsEscaped bool, r *
|
||||
return params, nil
|
||||
}
|
||||
|
||||
// CombobulateParams is parameters of combobulate operation.
|
||||
type CombobulateParams struct {
|
||||
MapID int64
|
||||
}
|
||||
|
||||
func unpackCombobulateParams(packed middleware.Parameters) (params CombobulateParams) {
|
||||
{
|
||||
key := middleware.ParameterKey{
|
||||
Name: "MapID",
|
||||
In: "path",
|
||||
}
|
||||
params.MapID = packed[key].(int64)
|
||||
}
|
||||
return params
|
||||
}
|
||||
|
||||
func decodeCombobulateParams(args [1]string, argsEscaped bool, r *http.Request) (params CombobulateParams, _ error) {
|
||||
// Decode path: MapID.
|
||||
if err := func() error {
|
||||
param := args[0]
|
||||
if argsEscaped {
|
||||
unescaped, err := url.PathUnescape(args[0])
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "unescape path")
|
||||
}
|
||||
param = unescaped
|
||||
}
|
||||
if len(param) > 0 {
|
||||
d := uri.NewPathDecoder(uri.PathDecoderConfig{
|
||||
Param: "MapID",
|
||||
Value: param,
|
||||
Style: uri.PathStyleSimple,
|
||||
Explode: false,
|
||||
})
|
||||
|
||||
if err := func() error {
|
||||
val, err := d.DecodeValue()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
c, err := conv.ToInt64(val)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
params.MapID = c
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := (validate.Int{
|
||||
MinSet: true,
|
||||
Min: 0,
|
||||
MaxSet: false,
|
||||
Max: 0,
|
||||
MinExclusive: false,
|
||||
MaxExclusive: false,
|
||||
MultipleOfSet: false,
|
||||
MultipleOf: 0,
|
||||
Pattern: nil,
|
||||
}).Validate(int64(params.MapID)); err != nil {
|
||||
return errors.Wrap(err, "int")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
return validate.ErrFieldRequired
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return params, &ogenerrors.DecodeParamError{
|
||||
Name: "MapID",
|
||||
In: "path",
|
||||
Err: err,
|
||||
}
|
||||
}
|
||||
return params, nil
|
||||
}
|
||||
|
||||
// CreateMapfixAuditCommentParams is parameters of createMapfixAuditComment operation.
|
||||
type CreateMapfixAuditCommentParams struct {
|
||||
// The unique identifier for a mapfix.
|
||||
@@ -2281,6 +2198,90 @@ func decodeCreateSubmissionAuditCommentParams(args [1]string, argsEscaped bool,
|
||||
return params, nil
|
||||
}
|
||||
|
||||
// CreateSubmissionReviewParams is parameters of createSubmissionReview operation.
|
||||
type CreateSubmissionReviewParams struct {
|
||||
// The unique identifier for a submission.
|
||||
SubmissionID int64
|
||||
}
|
||||
|
||||
func unpackCreateSubmissionReviewParams(packed middleware.Parameters) (params CreateSubmissionReviewParams) {
|
||||
{
|
||||
key := middleware.ParameterKey{
|
||||
Name: "SubmissionID",
|
||||
In: "path",
|
||||
}
|
||||
params.SubmissionID = packed[key].(int64)
|
||||
}
|
||||
return params
|
||||
}
|
||||
|
||||
func decodeCreateSubmissionReviewParams(args [1]string, argsEscaped bool, r *http.Request) (params CreateSubmissionReviewParams, _ error) {
|
||||
// Decode path: SubmissionID.
|
||||
if err := func() error {
|
||||
param := args[0]
|
||||
if argsEscaped {
|
||||
unescaped, err := url.PathUnescape(args[0])
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "unescape path")
|
||||
}
|
||||
param = unescaped
|
||||
}
|
||||
if len(param) > 0 {
|
||||
d := uri.NewPathDecoder(uri.PathDecoderConfig{
|
||||
Param: "SubmissionID",
|
||||
Value: param,
|
||||
Style: uri.PathStyleSimple,
|
||||
Explode: false,
|
||||
})
|
||||
|
||||
if err := func() error {
|
||||
val, err := d.DecodeValue()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
c, err := conv.ToInt64(val)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
params.SubmissionID = c
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := (validate.Int{
|
||||
MinSet: true,
|
||||
Min: 0,
|
||||
MaxSet: false,
|
||||
Max: 0,
|
||||
MinExclusive: false,
|
||||
MaxExclusive: false,
|
||||
MultipleOfSet: false,
|
||||
MultipleOf: 0,
|
||||
Pattern: nil,
|
||||
}).Validate(int64(params.SubmissionID)); err != nil {
|
||||
return errors.Wrap(err, "int")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
return validate.ErrFieldRequired
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return params, &ogenerrors.DecodeParamError{
|
||||
Name: "SubmissionID",
|
||||
In: "path",
|
||||
Err: err,
|
||||
}
|
||||
}
|
||||
return params, nil
|
||||
}
|
||||
|
||||
// DeleteScriptParams is parameters of deleteScript operation.
|
||||
type DeleteScriptParams struct {
|
||||
// The unique identifier for a script.
|
||||
@@ -2532,6 +2533,172 @@ func decodeDownloadMapAssetParams(args [1]string, argsEscaped bool, r *http.Requ
|
||||
return params, nil
|
||||
}
|
||||
|
||||
// GetAOREventParams is parameters of getAOREvent operation.
|
||||
type GetAOREventParams struct {
|
||||
AOREventID int64
|
||||
}
|
||||
|
||||
func unpackGetAOREventParams(packed middleware.Parameters) (params GetAOREventParams) {
|
||||
{
|
||||
key := middleware.ParameterKey{
|
||||
Name: "AOREventID",
|
||||
In: "path",
|
||||
}
|
||||
params.AOREventID = packed[key].(int64)
|
||||
}
|
||||
return params
|
||||
}
|
||||
|
||||
func decodeGetAOREventParams(args [1]string, argsEscaped bool, r *http.Request) (params GetAOREventParams, _ error) {
|
||||
// Decode path: AOREventID.
|
||||
if err := func() error {
|
||||
param := args[0]
|
||||
if argsEscaped {
|
||||
unescaped, err := url.PathUnescape(args[0])
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "unescape path")
|
||||
}
|
||||
param = unescaped
|
||||
}
|
||||
if len(param) > 0 {
|
||||
d := uri.NewPathDecoder(uri.PathDecoderConfig{
|
||||
Param: "AOREventID",
|
||||
Value: param,
|
||||
Style: uri.PathStyleSimple,
|
||||
Explode: false,
|
||||
})
|
||||
|
||||
if err := func() error {
|
||||
val, err := d.DecodeValue()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
c, err := conv.ToInt64(val)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
params.AOREventID = c
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := (validate.Int{
|
||||
MinSet: true,
|
||||
Min: 1,
|
||||
MaxSet: false,
|
||||
Max: 0,
|
||||
MinExclusive: false,
|
||||
MaxExclusive: false,
|
||||
MultipleOfSet: false,
|
||||
MultipleOf: 0,
|
||||
Pattern: nil,
|
||||
}).Validate(int64(params.AOREventID)); err != nil {
|
||||
return errors.Wrap(err, "int")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
return validate.ErrFieldRequired
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return params, &ogenerrors.DecodeParamError{
|
||||
Name: "AOREventID",
|
||||
In: "path",
|
||||
Err: err,
|
||||
}
|
||||
}
|
||||
return params, nil
|
||||
}
|
||||
|
||||
// GetAOREventSubmissionsParams is parameters of getAOREventSubmissions operation.
|
||||
type GetAOREventSubmissionsParams struct {
|
||||
AOREventID int64
|
||||
}
|
||||
|
||||
func unpackGetAOREventSubmissionsParams(packed middleware.Parameters) (params GetAOREventSubmissionsParams) {
|
||||
{
|
||||
key := middleware.ParameterKey{
|
||||
Name: "AOREventID",
|
||||
In: "path",
|
||||
}
|
||||
params.AOREventID = packed[key].(int64)
|
||||
}
|
||||
return params
|
||||
}
|
||||
|
||||
func decodeGetAOREventSubmissionsParams(args [1]string, argsEscaped bool, r *http.Request) (params GetAOREventSubmissionsParams, _ error) {
|
||||
// Decode path: AOREventID.
|
||||
if err := func() error {
|
||||
param := args[0]
|
||||
if argsEscaped {
|
||||
unescaped, err := url.PathUnescape(args[0])
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "unescape path")
|
||||
}
|
||||
param = unescaped
|
||||
}
|
||||
if len(param) > 0 {
|
||||
d := uri.NewPathDecoder(uri.PathDecoderConfig{
|
||||
Param: "AOREventID",
|
||||
Value: param,
|
||||
Style: uri.PathStyleSimple,
|
||||
Explode: false,
|
||||
})
|
||||
|
||||
if err := func() error {
|
||||
val, err := d.DecodeValue()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
c, err := conv.ToInt64(val)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
params.AOREventID = c
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := (validate.Int{
|
||||
MinSet: true,
|
||||
Min: 1,
|
||||
MaxSet: false,
|
||||
Max: 0,
|
||||
MinExclusive: false,
|
||||
MaxExclusive: false,
|
||||
MultipleOfSet: false,
|
||||
MultipleOf: 0,
|
||||
Pattern: nil,
|
||||
}).Validate(int64(params.AOREventID)); err != nil {
|
||||
return errors.Wrap(err, "int")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
return validate.ErrFieldRequired
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return params, &ogenerrors.DecodeParamError{
|
||||
Name: "AOREventID",
|
||||
In: "path",
|
||||
Err: err,
|
||||
}
|
||||
}
|
||||
return params, nil
|
||||
}
|
||||
|
||||
// GetAssetThumbnailParams is parameters of getAssetThumbnail operation.
|
||||
type GetAssetThumbnailParams struct {
|
||||
AssetID uint64
|
||||
@@ -3310,6 +3477,143 @@ func decodeGetUserThumbnailParams(args [1]string, argsEscaped bool, r *http.Requ
|
||||
return params, nil
|
||||
}
|
||||
|
||||
// ListAOREventsParams is parameters of listAOREvents operation.
|
||||
type ListAOREventsParams struct {
|
||||
Page int32
|
||||
Limit int32
|
||||
}
|
||||
|
||||
func unpackListAOREventsParams(packed middleware.Parameters) (params ListAOREventsParams) {
|
||||
{
|
||||
key := middleware.ParameterKey{
|
||||
Name: "Page",
|
||||
In: "query",
|
||||
}
|
||||
params.Page = packed[key].(int32)
|
||||
}
|
||||
{
|
||||
key := middleware.ParameterKey{
|
||||
Name: "Limit",
|
||||
In: "query",
|
||||
}
|
||||
params.Limit = packed[key].(int32)
|
||||
}
|
||||
return params
|
||||
}
|
||||
|
||||
func decodeListAOREventsParams(args [0]string, argsEscaped bool, r *http.Request) (params ListAOREventsParams, _ error) {
|
||||
q := uri.NewQueryDecoder(r.URL.Query())
|
||||
// Decode query: Page.
|
||||
if err := func() error {
|
||||
cfg := uri.QueryParameterDecodingConfig{
|
||||
Name: "Page",
|
||||
Style: uri.QueryStyleForm,
|
||||
Explode: true,
|
||||
}
|
||||
|
||||
if err := q.HasParam(cfg); err == nil {
|
||||
if err := q.DecodeParam(cfg, func(d uri.Decoder) error {
|
||||
val, err := d.DecodeValue()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
c, err := conv.ToInt32(val)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
params.Page = c
|
||||
return nil
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := (validate.Int{
|
||||
MinSet: true,
|
||||
Min: 1,
|
||||
MaxSet: false,
|
||||
Max: 0,
|
||||
MinExclusive: false,
|
||||
MaxExclusive: false,
|
||||
MultipleOfSet: false,
|
||||
MultipleOf: 0,
|
||||
Pattern: nil,
|
||||
}).Validate(int64(params.Page)); err != nil {
|
||||
return errors.Wrap(err, "int")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return params, &ogenerrors.DecodeParamError{
|
||||
Name: "Page",
|
||||
In: "query",
|
||||
Err: err,
|
||||
}
|
||||
}
|
||||
// Decode query: Limit.
|
||||
if err := func() error {
|
||||
cfg := uri.QueryParameterDecodingConfig{
|
||||
Name: "Limit",
|
||||
Style: uri.QueryStyleForm,
|
||||
Explode: true,
|
||||
}
|
||||
|
||||
if err := q.HasParam(cfg); err == nil {
|
||||
if err := q.DecodeParam(cfg, func(d uri.Decoder) error {
|
||||
val, err := d.DecodeValue()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
c, err := conv.ToInt32(val)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
params.Limit = c
|
||||
return nil
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := (validate.Int{
|
||||
MinSet: true,
|
||||
Min: 0,
|
||||
MaxSet: true,
|
||||
Max: 100,
|
||||
MinExclusive: false,
|
||||
MaxExclusive: false,
|
||||
MultipleOfSet: false,
|
||||
MultipleOf: 0,
|
||||
Pattern: nil,
|
||||
}).Validate(int64(params.Limit)); err != nil {
|
||||
return errors.Wrap(err, "int")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return params, &ogenerrors.DecodeParamError{
|
||||
Name: "Limit",
|
||||
In: "query",
|
||||
Err: err,
|
||||
}
|
||||
}
|
||||
return params, nil
|
||||
}
|
||||
|
||||
// ListMapfixAuditEventsParams is parameters of listMapfixAuditEvents operation.
|
||||
type ListMapfixAuditEventsParams struct {
|
||||
// The unique identifier for a mapfix.
|
||||
@@ -5899,6 +6203,90 @@ func decodeListSubmissionAuditEventsParams(args [1]string, argsEscaped bool, r *
|
||||
return params, nil
|
||||
}
|
||||
|
||||
// ListSubmissionReviewsParams is parameters of listSubmissionReviews operation.
|
||||
type ListSubmissionReviewsParams struct {
|
||||
// The unique identifier for a submission.
|
||||
SubmissionID int64
|
||||
}
|
||||
|
||||
func unpackListSubmissionReviewsParams(packed middleware.Parameters) (params ListSubmissionReviewsParams) {
|
||||
{
|
||||
key := middleware.ParameterKey{
|
||||
Name: "SubmissionID",
|
||||
In: "path",
|
||||
}
|
||||
params.SubmissionID = packed[key].(int64)
|
||||
}
|
||||
return params
|
||||
}
|
||||
|
||||
func decodeListSubmissionReviewsParams(args [1]string, argsEscaped bool, r *http.Request) (params ListSubmissionReviewsParams, _ error) {
|
||||
// Decode path: SubmissionID.
|
||||
if err := func() error {
|
||||
param := args[0]
|
||||
if argsEscaped {
|
||||
unescaped, err := url.PathUnescape(args[0])
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "unescape path")
|
||||
}
|
||||
param = unescaped
|
||||
}
|
||||
if len(param) > 0 {
|
||||
d := uri.NewPathDecoder(uri.PathDecoderConfig{
|
||||
Param: "SubmissionID",
|
||||
Value: param,
|
||||
Style: uri.PathStyleSimple,
|
||||
Explode: false,
|
||||
})
|
||||
|
||||
if err := func() error {
|
||||
val, err := d.DecodeValue()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
c, err := conv.ToInt64(val)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
params.SubmissionID = c
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := (validate.Int{
|
||||
MinSet: true,
|
||||
Min: 0,
|
||||
MaxSet: false,
|
||||
Max: 0,
|
||||
MinExclusive: false,
|
||||
MaxExclusive: false,
|
||||
MultipleOfSet: false,
|
||||
MultipleOf: 0,
|
||||
Pattern: nil,
|
||||
}).Validate(int64(params.SubmissionID)); err != nil {
|
||||
return errors.Wrap(err, "int")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
return validate.ErrFieldRequired
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return params, &ogenerrors.DecodeParamError{
|
||||
Name: "SubmissionID",
|
||||
In: "path",
|
||||
Err: err,
|
||||
}
|
||||
}
|
||||
return params, nil
|
||||
}
|
||||
|
||||
// ListSubmissionsParams is parameters of listSubmissions operation.
|
||||
type ListSubmissionsParams struct {
|
||||
Page int32
|
||||
@@ -7570,3 +7958,158 @@ func decodeUpdateSubmissionModelParams(args [1]string, argsEscaped bool, r *http
|
||||
}
|
||||
return params, nil
|
||||
}
|
||||
|
||||
// UpdateSubmissionReviewParams is parameters of updateSubmissionReview operation.
|
||||
type UpdateSubmissionReviewParams struct {
|
||||
// The unique identifier for a submission.
|
||||
SubmissionID int64
|
||||
ReviewID int64
|
||||
}
|
||||
|
||||
func unpackUpdateSubmissionReviewParams(packed middleware.Parameters) (params UpdateSubmissionReviewParams) {
|
||||
{
|
||||
key := middleware.ParameterKey{
|
||||
Name: "SubmissionID",
|
||||
In: "path",
|
||||
}
|
||||
params.SubmissionID = packed[key].(int64)
|
||||
}
|
||||
{
|
||||
key := middleware.ParameterKey{
|
||||
Name: "ReviewID",
|
||||
In: "path",
|
||||
}
|
||||
params.ReviewID = packed[key].(int64)
|
||||
}
|
||||
return params
|
||||
}
|
||||
|
||||
func decodeUpdateSubmissionReviewParams(args [2]string, argsEscaped bool, r *http.Request) (params UpdateSubmissionReviewParams, _ error) {
|
||||
// Decode path: SubmissionID.
|
||||
if err := func() error {
|
||||
param := args[0]
|
||||
if argsEscaped {
|
||||
unescaped, err := url.PathUnescape(args[0])
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "unescape path")
|
||||
}
|
||||
param = unescaped
|
||||
}
|
||||
if len(param) > 0 {
|
||||
d := uri.NewPathDecoder(uri.PathDecoderConfig{
|
||||
Param: "SubmissionID",
|
||||
Value: param,
|
||||
Style: uri.PathStyleSimple,
|
||||
Explode: false,
|
||||
})
|
||||
|
||||
if err := func() error {
|
||||
val, err := d.DecodeValue()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
c, err := conv.ToInt64(val)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
params.SubmissionID = c
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := (validate.Int{
|
||||
MinSet: true,
|
||||
Min: 0,
|
||||
MaxSet: false,
|
||||
Max: 0,
|
||||
MinExclusive: false,
|
||||
MaxExclusive: false,
|
||||
MultipleOfSet: false,
|
||||
MultipleOf: 0,
|
||||
Pattern: nil,
|
||||
}).Validate(int64(params.SubmissionID)); err != nil {
|
||||
return errors.Wrap(err, "int")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
return validate.ErrFieldRequired
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return params, &ogenerrors.DecodeParamError{
|
||||
Name: "SubmissionID",
|
||||
In: "path",
|
||||
Err: err,
|
||||
}
|
||||
}
|
||||
// Decode path: ReviewID.
|
||||
if err := func() error {
|
||||
param := args[1]
|
||||
if argsEscaped {
|
||||
unescaped, err := url.PathUnescape(args[1])
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "unescape path")
|
||||
}
|
||||
param = unescaped
|
||||
}
|
||||
if len(param) > 0 {
|
||||
d := uri.NewPathDecoder(uri.PathDecoderConfig{
|
||||
Param: "ReviewID",
|
||||
Value: param,
|
||||
Style: uri.PathStyleSimple,
|
||||
Explode: false,
|
||||
})
|
||||
|
||||
if err := func() error {
|
||||
val, err := d.DecodeValue()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
c, err := conv.ToInt64(val)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
params.ReviewID = c
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := (validate.Int{
|
||||
MinSet: true,
|
||||
Min: 0,
|
||||
MaxSet: false,
|
||||
Max: 0,
|
||||
MinExclusive: false,
|
||||
MaxExclusive: false,
|
||||
MultipleOfSet: false,
|
||||
MultipleOf: 0,
|
||||
Pattern: nil,
|
||||
}).Validate(int64(params.ReviewID)); err != nil {
|
||||
return errors.Wrap(err, "int")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
return validate.ErrFieldRequired
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return params, &ogenerrors.DecodeParamError{
|
||||
Name: "ReviewID",
|
||||
In: "path",
|
||||
Err: err,
|
||||
}
|
||||
}
|
||||
return params, nil
|
||||
}
|
||||
|
||||
@@ -717,6 +717,85 @@ func (s *Server) decodeCreateSubmissionAuditCommentRequest(r *http.Request) (
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) decodeCreateSubmissionReviewRequest(r *http.Request) (
|
||||
req *SubmissionReviewCreate,
|
||||
rawBody []byte,
|
||||
close func() error,
|
||||
rerr error,
|
||||
) {
|
||||
var closers []func() error
|
||||
close = func() error {
|
||||
var merr error
|
||||
// Close in reverse order, to match defer behavior.
|
||||
for i := len(closers) - 1; i >= 0; i-- {
|
||||
c := closers[i]
|
||||
merr = errors.Join(merr, c())
|
||||
}
|
||||
return merr
|
||||
}
|
||||
defer func() {
|
||||
if rerr != nil {
|
||||
rerr = errors.Join(rerr, close())
|
||||
}
|
||||
}()
|
||||
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return req, rawBody, close, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
if r.ContentLength == 0 {
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
buf, err := io.ReadAll(r.Body)
|
||||
defer func() {
|
||||
_ = r.Body.Close()
|
||||
}()
|
||||
if err != nil {
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
|
||||
// Reset the body to allow for downstream reading.
|
||||
r.Body = io.NopCloser(bytes.NewBuffer(buf))
|
||||
|
||||
if len(buf) == 0 {
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
|
||||
rawBody = append(rawBody, buf...)
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var request SubmissionReviewCreate
|
||||
if err := func() error {
|
||||
if err := request.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := request.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return req, rawBody, close, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &request, rawBody, close, nil
|
||||
default:
|
||||
return req, rawBody, close, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) decodeReleaseSubmissionsRequest(r *http.Request) (
|
||||
req []ReleaseInfo,
|
||||
rawBody []byte,
|
||||
@@ -1021,3 +1100,82 @@ func (s *Server) decodeUpdateScriptPolicyRequest(r *http.Request) (
|
||||
return req, rawBody, close, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) decodeUpdateSubmissionReviewRequest(r *http.Request) (
|
||||
req *SubmissionReviewCreate,
|
||||
rawBody []byte,
|
||||
close func() error,
|
||||
rerr error,
|
||||
) {
|
||||
var closers []func() error
|
||||
close = func() error {
|
||||
var merr error
|
||||
// Close in reverse order, to match defer behavior.
|
||||
for i := len(closers) - 1; i >= 0; i-- {
|
||||
c := closers[i]
|
||||
merr = errors.Join(merr, c())
|
||||
}
|
||||
return merr
|
||||
}
|
||||
defer func() {
|
||||
if rerr != nil {
|
||||
rerr = errors.Join(rerr, close())
|
||||
}
|
||||
}()
|
||||
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return req, rawBody, close, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
if r.ContentLength == 0 {
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
buf, err := io.ReadAll(r.Body)
|
||||
defer func() {
|
||||
_ = r.Body.Close()
|
||||
}()
|
||||
if err != nil {
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
|
||||
// Reset the body to allow for downstream reading.
|
||||
r.Body = io.NopCloser(bytes.NewBuffer(buf))
|
||||
|
||||
if len(buf) == 0 {
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
|
||||
rawBody = append(rawBody, buf...)
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var request SubmissionReviewCreate
|
||||
if err := func() error {
|
||||
if err := request.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := request.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return req, rawBody, close, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &request, rawBody, close, nil
|
||||
default:
|
||||
return req, rawBody, close, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -142,6 +142,20 @@ func encodeCreateSubmissionAuditCommentRequest(
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeCreateSubmissionReviewRequest(
|
||||
req *SubmissionReviewCreate,
|
||||
r *http.Request,
|
||||
) error {
|
||||
const contentType = "application/json"
|
||||
e := new(jx.Encoder)
|
||||
{
|
||||
req.Encode(e)
|
||||
}
|
||||
encoded := e.Bytes()
|
||||
ht.SetBody(r, bytes.NewReader(encoded), contentType)
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeReleaseSubmissionsRequest(
|
||||
req []ReleaseInfo,
|
||||
r *http.Request,
|
||||
@@ -197,3 +211,17 @@ func encodeUpdateScriptPolicyRequest(
|
||||
ht.SetBody(r, bytes.NewReader(encoded), contentType)
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeUpdateSubmissionReviewRequest(
|
||||
req *SubmissionReviewCreate,
|
||||
r *http.Request,
|
||||
) error {
|
||||
const contentType = "application/json"
|
||||
e := new(jx.Encoder)
|
||||
{
|
||||
req.Encode(e)
|
||||
}
|
||||
encoded := e.Bytes()
|
||||
ht.SetBody(r, bytes.NewReader(encoded), contentType)
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -1733,66 +1733,6 @@ func decodeBatchUsernamesResponse(resp *http.Response) (res *BatchUsernamesOK, _
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeCombobulateResponse(resp *http.Response) (res *CombobulateNoContent, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 204:
|
||||
// Code 204.
|
||||
return &CombobulateNoContent{}, nil
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Error
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &ErrorStatusCode{
|
||||
StatusCode: resp.StatusCode,
|
||||
Response: response,
|
||||
}, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
return res, errors.Wrapf(err, "default (code %d)", resp.StatusCode)
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeCreateMapfixResponse(resp *http.Response) (res *OperationID, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 201:
|
||||
@@ -2418,6 +2358,107 @@ func decodeCreateSubmissionAuditCommentResponse(resp *http.Response) (res *Creat
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeCreateSubmissionReviewResponse(resp *http.Response) (res *SubmissionReview, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 200:
|
||||
// Code 200.
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response SubmissionReview
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &response, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Error
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &ErrorStatusCode{
|
||||
StatusCode: resp.StatusCode,
|
||||
Response: response,
|
||||
}, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
return res, errors.Wrapf(err, "default (code %d)", resp.StatusCode)
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeDeleteScriptResponse(resp *http.Response) (res *DeleteScriptNoContent, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 204:
|
||||
@@ -2614,6 +2655,334 @@ func decodeDownloadMapAssetResponse(resp *http.Response) (res DownloadMapAssetOK
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeGetAOREventResponse(resp *http.Response) (res *AOREvent, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 200:
|
||||
// Code 200.
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response AOREvent
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &response, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Error
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &ErrorStatusCode{
|
||||
StatusCode: resp.StatusCode,
|
||||
Response: response,
|
||||
}, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
return res, errors.Wrapf(err, "default (code %d)", resp.StatusCode)
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeGetAOREventSubmissionsResponse(resp *http.Response) (res []Submission, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 200:
|
||||
// Code 200.
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response []Submission
|
||||
if err := func() error {
|
||||
response = make([]Submission, 0)
|
||||
if err := d.Arr(func(d *jx.Decoder) error {
|
||||
var elem Submission
|
||||
if err := elem.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
response = append(response, elem)
|
||||
return nil
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if response == nil {
|
||||
return errors.New("nil is invalid value")
|
||||
}
|
||||
var failures []validate.FieldError
|
||||
for i, elem := range response {
|
||||
if err := func() error {
|
||||
if err := elem.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
failures = append(failures, validate.FieldError{
|
||||
Name: fmt.Sprintf("[%d]", i),
|
||||
Error: err,
|
||||
})
|
||||
}
|
||||
}
|
||||
if len(failures) > 0 {
|
||||
return &validate.Error{Fields: failures}
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return response, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Error
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &ErrorStatusCode{
|
||||
StatusCode: resp.StatusCode,
|
||||
Response: response,
|
||||
}, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
return res, errors.Wrapf(err, "default (code %d)", resp.StatusCode)
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeGetActiveAOREventResponse(resp *http.Response) (res *AOREvent, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 200:
|
||||
// Code 200.
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response AOREvent
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &response, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Error
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &ErrorStatusCode{
|
||||
StatusCode: resp.StatusCode,
|
||||
Response: response,
|
||||
}, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
return res, errors.Wrapf(err, "default (code %d)", resp.StatusCode)
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeGetAssetThumbnailResponse(resp *http.Response) (res *GetAssetThumbnailFound, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 302:
|
||||
@@ -3519,6 +3888,132 @@ func decodeGetUserThumbnailResponse(resp *http.Response) (res *GetUserThumbnailF
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeListAOREventsResponse(resp *http.Response) (res []AOREvent, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 200:
|
||||
// Code 200.
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response []AOREvent
|
||||
if err := func() error {
|
||||
response = make([]AOREvent, 0)
|
||||
if err := d.Arr(func(d *jx.Decoder) error {
|
||||
var elem AOREvent
|
||||
if err := elem.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
response = append(response, elem)
|
||||
return nil
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if response == nil {
|
||||
return errors.New("nil is invalid value")
|
||||
}
|
||||
var failures []validate.FieldError
|
||||
for i, elem := range response {
|
||||
if err := func() error {
|
||||
if err := elem.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
failures = append(failures, validate.FieldError{
|
||||
Name: fmt.Sprintf("[%d]", i),
|
||||
Error: err,
|
||||
})
|
||||
}
|
||||
}
|
||||
if len(failures) > 0 {
|
||||
return &validate.Error{Fields: failures}
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return response, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Error
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &ErrorStatusCode{
|
||||
StatusCode: resp.StatusCode,
|
||||
Response: response,
|
||||
}, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
return res, errors.Wrapf(err, "default (code %d)", resp.StatusCode)
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeListMapfixAuditEventsResponse(resp *http.Response) (res []AuditEvent, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 200:
|
||||
@@ -4250,6 +4745,132 @@ func decodeListSubmissionAuditEventsResponse(resp *http.Response) (res []AuditEv
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeListSubmissionReviewsResponse(resp *http.Response) (res []SubmissionReview, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 200:
|
||||
// Code 200.
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response []SubmissionReview
|
||||
if err := func() error {
|
||||
response = make([]SubmissionReview, 0)
|
||||
if err := d.Arr(func(d *jx.Decoder) error {
|
||||
var elem SubmissionReview
|
||||
if err := elem.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
response = append(response, elem)
|
||||
return nil
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if response == nil {
|
||||
return errors.New("nil is invalid value")
|
||||
}
|
||||
var failures []validate.FieldError
|
||||
for i, elem := range response {
|
||||
if err := func() error {
|
||||
if err := elem.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
failures = append(failures, validate.FieldError{
|
||||
Name: fmt.Sprintf("[%d]", i),
|
||||
Error: err,
|
||||
})
|
||||
}
|
||||
}
|
||||
if len(failures) > 0 {
|
||||
return &validate.Error{Fields: failures}
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return response, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Error
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &ErrorStatusCode{
|
||||
StatusCode: resp.StatusCode,
|
||||
Response: response,
|
||||
}, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
return res, errors.Wrapf(err, "default (code %d)", resp.StatusCode)
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeListSubmissionsResponse(resp *http.Response) (res *Submissions, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 200:
|
||||
@@ -4452,66 +5073,6 @@ func decodeReleaseSubmissionsResponse(resp *http.Response) (res *OperationID, _
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeSeedCombobulatorResponse(resp *http.Response) (res *SeedCombobulatorNoContent, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 204:
|
||||
// Code 204.
|
||||
return &SeedCombobulatorNoContent{}, nil
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Error
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &ErrorStatusCode{
|
||||
StatusCode: resp.StatusCode,
|
||||
Response: response,
|
||||
}, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
return res, errors.Wrapf(err, "default (code %d)", resp.StatusCode)
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeSessionRolesResponse(resp *http.Response) (res *Roles, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 200:
|
||||
@@ -5227,3 +5788,104 @@ func decodeUpdateSubmissionModelResponse(resp *http.Response) (res *UpdateSubmis
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeUpdateSubmissionReviewResponse(resp *http.Response) (res *SubmissionReview, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 200:
|
||||
// Code 200.
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response SubmissionReview
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &response, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Error
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &ErrorStatusCode{
|
||||
StatusCode: resp.StatusCode,
|
||||
Response: response,
|
||||
}, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
return res, errors.Wrapf(err, "default (code %d)", resp.StatusCode)
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
@@ -225,13 +225,6 @@ func encodeBatchUsernamesResponse(response *BatchUsernamesOK, w http.ResponseWri
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeCombobulateResponse(response *CombobulateNoContent, w http.ResponseWriter, span trace.Span) error {
|
||||
w.WriteHeader(204)
|
||||
span.SetStatus(codes.Ok, http.StatusText(204))
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeCreateMapfixResponse(response *OperationID, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(201)
|
||||
@@ -316,6 +309,20 @@ func encodeCreateSubmissionAuditCommentResponse(response *CreateSubmissionAuditC
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeCreateSubmissionReviewResponse(response *SubmissionReview, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(200)
|
||||
span.SetStatus(codes.Ok, http.StatusText(200))
|
||||
|
||||
e := new(jx.Encoder)
|
||||
response.Encode(e)
|
||||
if _, err := e.WriteTo(w); err != nil {
|
||||
return errors.Wrap(err, "write")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeDeleteScriptResponse(response *DeleteScriptNoContent, w http.ResponseWriter, span trace.Span) error {
|
||||
w.WriteHeader(204)
|
||||
span.SetStatus(codes.Ok, http.StatusText(204))
|
||||
@@ -346,8 +353,53 @@ func encodeDownloadMapAssetResponse(response DownloadMapAssetOK, w http.Response
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeGetAOREventResponse(response *AOREvent, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(200)
|
||||
span.SetStatus(codes.Ok, http.StatusText(200))
|
||||
|
||||
e := new(jx.Encoder)
|
||||
response.Encode(e)
|
||||
if _, err := e.WriteTo(w); err != nil {
|
||||
return errors.Wrap(err, "write")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeGetAOREventSubmissionsResponse(response []Submission, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(200)
|
||||
span.SetStatus(codes.Ok, http.StatusText(200))
|
||||
|
||||
e := new(jx.Encoder)
|
||||
e.ArrStart()
|
||||
for _, elem := range response {
|
||||
elem.Encode(e)
|
||||
}
|
||||
e.ArrEnd()
|
||||
if _, err := e.WriteTo(w); err != nil {
|
||||
return errors.Wrap(err, "write")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeGetActiveAOREventResponse(response *AOREvent, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(200)
|
||||
span.SetStatus(codes.Ok, http.StatusText(200))
|
||||
|
||||
e := new(jx.Encoder)
|
||||
response.Encode(e)
|
||||
if _, err := e.WriteTo(w); err != nil {
|
||||
return errors.Wrap(err, "write")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeGetAssetThumbnailResponse(response *GetAssetThumbnailFound, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Access-Control-Expose-Headers", "Location")
|
||||
// Encoding response headers.
|
||||
{
|
||||
h := uri.NewHeaderEncoder(w.Header())
|
||||
@@ -472,7 +524,6 @@ func encodeGetSubmissionResponse(response *Submission, w http.ResponseWriter, sp
|
||||
}
|
||||
|
||||
func encodeGetUserThumbnailResponse(response *GetUserThumbnailFound, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Access-Control-Expose-Headers", "Location")
|
||||
// Encoding response headers.
|
||||
{
|
||||
h := uri.NewHeaderEncoder(w.Header())
|
||||
@@ -498,6 +549,24 @@ func encodeGetUserThumbnailResponse(response *GetUserThumbnailFound, w http.Resp
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeListAOREventsResponse(response []AOREvent, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(200)
|
||||
span.SetStatus(codes.Ok, http.StatusText(200))
|
||||
|
||||
e := new(jx.Encoder)
|
||||
e.ArrStart()
|
||||
for _, elem := range response {
|
||||
elem.Encode(e)
|
||||
}
|
||||
e.ArrEnd()
|
||||
if _, err := e.WriteTo(w); err != nil {
|
||||
return errors.Wrap(err, "write")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeListMapfixAuditEventsResponse(response []AuditEvent, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(200)
|
||||
@@ -602,6 +671,24 @@ func encodeListSubmissionAuditEventsResponse(response []AuditEvent, w http.Respo
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeListSubmissionReviewsResponse(response []SubmissionReview, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(200)
|
||||
span.SetStatus(codes.Ok, http.StatusText(200))
|
||||
|
||||
e := new(jx.Encoder)
|
||||
e.ArrStart()
|
||||
for _, elem := range response {
|
||||
elem.Encode(e)
|
||||
}
|
||||
e.ArrEnd()
|
||||
if _, err := e.WriteTo(w); err != nil {
|
||||
return errors.Wrap(err, "write")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeListSubmissionsResponse(response *Submissions, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(200)
|
||||
@@ -630,13 +717,6 @@ func encodeReleaseSubmissionsResponse(response *OperationID, w http.ResponseWrit
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeSeedCombobulatorResponse(response *SeedCombobulatorNoContent, w http.ResponseWriter, span trace.Span) error {
|
||||
w.WriteHeader(204)
|
||||
span.SetStatus(codes.Ok, http.StatusText(204))
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeSessionRolesResponse(response *Roles, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(200)
|
||||
@@ -728,6 +808,20 @@ func encodeUpdateSubmissionModelResponse(response *UpdateSubmissionModelNoConten
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeUpdateSubmissionReviewResponse(response *SubmissionReview, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(200)
|
||||
span.SetStatus(codes.Ok, http.StatusText(200))
|
||||
|
||||
e := new(jx.Encoder)
|
||||
response.Encode(e)
|
||||
if _, err := e.WriteTo(w); err != nil {
|
||||
return errors.Wrap(err, "write")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeErrorResponse(response *ErrorStatusCode, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
code := response.StatusCode
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -15,6 +15,104 @@ func (s *ErrorStatusCode) Error() string {
|
||||
return fmt.Sprintf("code %d: %+v", s.StatusCode, s.Response)
|
||||
}
|
||||
|
||||
// AOREvent describes one AOR (Accept or Reject) event cycle with its
// scheduling dates and lifecycle status.
// Ref: #/components/schemas/AOREvent
type AOREvent struct {
	// Unique identifier of the AOR event.
	ID int64 `json:"ID"`
	// Unix timestamp for the 1st day of AOR month.
	StartDate int64 `json:"StartDate"`
	// Unix timestamp when submissions are frozen.
	FreezeDate int64 `json:"FreezeDate"`
	// Unix timestamp when automatic selection occurs (end of week 1).
	SelectionDate int64 `json:"SelectionDate"`
	// Unix timestamp when final accept/reject decisions are made (end of month).
	DecisionDate int64 `json:"DecisionDate"`
	// AOR Event Status: * `0` - Scheduled * `1` - Open * `2` - Frozen * `3` - Selected * `4` - Completed
	// * `5` - Closed.
	Status int32 `json:"Status"`
	// Record creation time — presumably a Unix timestamp like the other
	// date fields; confirm against the datastore.
	CreatedAt int64 `json:"CreatedAt"`
	// Record last-update time — presumably a Unix timestamp; confirm
	// against the datastore.
	UpdatedAt int64 `json:"UpdatedAt"`
}

// GetID returns the value of ID.
func (s *AOREvent) GetID() int64 {
	return s.ID
}

// GetStartDate returns the value of StartDate.
func (s *AOREvent) GetStartDate() int64 {
	return s.StartDate
}

// GetFreezeDate returns the value of FreezeDate.
func (s *AOREvent) GetFreezeDate() int64 {
	return s.FreezeDate
}

// GetSelectionDate returns the value of SelectionDate.
func (s *AOREvent) GetSelectionDate() int64 {
	return s.SelectionDate
}

// GetDecisionDate returns the value of DecisionDate.
func (s *AOREvent) GetDecisionDate() int64 {
	return s.DecisionDate
}

// GetStatus returns the value of Status.
func (s *AOREvent) GetStatus() int32 {
	return s.Status
}

// GetCreatedAt returns the value of CreatedAt.
func (s *AOREvent) GetCreatedAt() int64 {
	return s.CreatedAt
}

// GetUpdatedAt returns the value of UpdatedAt.
func (s *AOREvent) GetUpdatedAt() int64 {
	return s.UpdatedAt
}

// SetID sets the value of ID.
func (s *AOREvent) SetID(val int64) {
	s.ID = val
}

// SetStartDate sets the value of StartDate.
func (s *AOREvent) SetStartDate(val int64) {
	s.StartDate = val
}

// SetFreezeDate sets the value of FreezeDate.
func (s *AOREvent) SetFreezeDate(val int64) {
	s.FreezeDate = val
}

// SetSelectionDate sets the value of SelectionDate.
func (s *AOREvent) SetSelectionDate(val int64) {
	s.SelectionDate = val
}

// SetDecisionDate sets the value of DecisionDate.
func (s *AOREvent) SetDecisionDate(val int64) {
	s.DecisionDate = val
}

// SetStatus sets the value of Status.
func (s *AOREvent) SetStatus(val int32) {
	s.Status = val
}

// SetCreatedAt sets the value of CreatedAt.
func (s *AOREvent) SetCreatedAt(val int64) {
	s.CreatedAt = val
}

// SetUpdatedAt sets the value of UpdatedAt.
func (s *AOREvent) SetUpdatedAt(val int64) {
	s.UpdatedAt = val
}
|
||||
|
||||
// ActionMapfixAcceptedNoContent is response for ActionMapfixAccepted operation.
|
||||
type ActionMapfixAcceptedNoContent struct{}
|
||||
|
||||
@@ -441,9 +539,6 @@ func (s *BatchUsernamesReq) SetUserIds(val []uint64) {
|
||||
s.UserIds = val
|
||||
}
|
||||
|
||||
// CombobulateNoContent is response for Combobulate operation.
|
||||
type CombobulateNoContent struct{}
|
||||
|
||||
type CookieAuth struct {
|
||||
APIKey string
|
||||
Roles []string
|
||||
@@ -1996,9 +2091,6 @@ func (s *ScriptUpdate) SetResourceID(val OptInt64) {
|
||||
s.ResourceID = val
|
||||
}
|
||||
|
||||
// SeedCombobulatorNoContent is response for SeedCombobulator operation.
|
||||
type SeedCombobulatorNoContent struct{}
|
||||
|
||||
// SetMapfixCompletedNoContent is response for SetMapfixCompleted operation.
|
||||
type SetMapfixCompletedNoContent struct{}
|
||||
|
||||
@@ -2240,6 +2332,129 @@ func (s *Submission) SetStatusID(val int32) {
|
||||
s.StatusID = val
|
||||
}
|
||||
|
||||
// SubmissionReview is a reviewer's stored verdict on a submission.
// Ref: #/components/schemas/SubmissionReview
type SubmissionReview struct {
	// Unique identifier of the review.
	ID int64 `json:"ID"`
	// Identifier of the submission being reviewed.
	SubmissionID int64 `json:"SubmissionID"`
	// Identifier of the user who wrote the review.
	ReviewerID int64 `json:"ReviewerID"`
	// Whether the reviewer recommends accepting the submission.
	Recommend bool `json:"Recommend"`
	// Text description of the review reasoning.
	Description string `json:"Description"`
	// Flag indicating if the review is outdated due to submission changes.
	Outdated bool `json:"Outdated"`
	// Record creation time — presumably a Unix timestamp; confirm
	// against the datastore.
	CreatedAt int64 `json:"CreatedAt"`
	// Record last-update time — presumably a Unix timestamp; confirm
	// against the datastore.
	UpdatedAt int64 `json:"UpdatedAt"`
}

// GetID returns the value of ID.
func (s *SubmissionReview) GetID() int64 {
	return s.ID
}

// GetSubmissionID returns the value of SubmissionID.
func (s *SubmissionReview) GetSubmissionID() int64 {
	return s.SubmissionID
}

// GetReviewerID returns the value of ReviewerID.
func (s *SubmissionReview) GetReviewerID() int64 {
	return s.ReviewerID
}

// GetRecommend returns the value of Recommend.
func (s *SubmissionReview) GetRecommend() bool {
	return s.Recommend
}

// GetDescription returns the value of Description.
func (s *SubmissionReview) GetDescription() string {
	return s.Description
}

// GetOutdated returns the value of Outdated.
func (s *SubmissionReview) GetOutdated() bool {
	return s.Outdated
}

// GetCreatedAt returns the value of CreatedAt.
func (s *SubmissionReview) GetCreatedAt() int64 {
	return s.CreatedAt
}

// GetUpdatedAt returns the value of UpdatedAt.
func (s *SubmissionReview) GetUpdatedAt() int64 {
	return s.UpdatedAt
}

// SetID sets the value of ID.
func (s *SubmissionReview) SetID(val int64) {
	s.ID = val
}

// SetSubmissionID sets the value of SubmissionID.
func (s *SubmissionReview) SetSubmissionID(val int64) {
	s.SubmissionID = val
}

// SetReviewerID sets the value of ReviewerID.
func (s *SubmissionReview) SetReviewerID(val int64) {
	s.ReviewerID = val
}

// SetRecommend sets the value of Recommend.
func (s *SubmissionReview) SetRecommend(val bool) {
	s.Recommend = val
}

// SetDescription sets the value of Description.
func (s *SubmissionReview) SetDescription(val string) {
	s.Description = val
}

// SetOutdated sets the value of Outdated.
func (s *SubmissionReview) SetOutdated(val bool) {
	s.Outdated = val
}

// SetCreatedAt sets the value of CreatedAt.
func (s *SubmissionReview) SetCreatedAt(val int64) {
	s.CreatedAt = val
}

// SetUpdatedAt sets the value of UpdatedAt.
func (s *SubmissionReview) SetUpdatedAt(val int64) {
	s.UpdatedAt = val
}
|
||||
|
||||
// SubmissionReviewCreate is the request payload used by the
// createSubmissionReview and updateSubmissionReview operations; server
// fields (IDs, timestamps, Outdated) are assigned by the service.
// Ref: #/components/schemas/SubmissionReviewCreate
type SubmissionReviewCreate struct {
	// Whether the reviewer recommends accepting the submission.
	Recommend bool `json:"Recommend"`
	// Text description of the review reasoning.
	Description string `json:"Description"`
}

// GetRecommend returns the value of Recommend.
func (s *SubmissionReviewCreate) GetRecommend() bool {
	return s.Recommend
}

// GetDescription returns the value of Description.
func (s *SubmissionReviewCreate) GetDescription() string {
	return s.Description
}

// SetRecommend sets the value of Recommend.
func (s *SubmissionReviewCreate) SetRecommend(val bool) {
	s.Recommend = val
}

// SetDescription sets the value of Description.
func (s *SubmissionReviewCreate) SetDescription(val string) {
	s.Description = val
}
|
||||
|
||||
// Ref: #/components/schemas/SubmissionTriggerCreate
|
||||
type SubmissionTriggerCreate struct {
|
||||
AssetID int64 `json:"AssetID"`
|
||||
|
||||
@@ -32,7 +32,6 @@ func findAuthorization(h http.Header, prefix string) (string, bool) {
|
||||
return "", false
|
||||
}
|
||||
|
||||
// operationRolesCookieAuth is a private map storing roles per operation.
|
||||
var operationRolesCookieAuth = map[string][]string{
|
||||
ActionMapfixAcceptedOperation: []string{},
|
||||
ActionMapfixRejectOperation: []string{},
|
||||
@@ -58,7 +57,6 @@ var operationRolesCookieAuth = map[string][]string{
|
||||
ActionSubmissionTriggerUploadOperation: []string{},
|
||||
ActionSubmissionTriggerValidateOperation: []string{},
|
||||
ActionSubmissionValidatedOperation: []string{},
|
||||
CombobulateOperation: []string{},
|
||||
CreateMapfixOperation: []string{},
|
||||
CreateMapfixAuditCommentOperation: []string{},
|
||||
CreateScriptOperation: []string{},
|
||||
@@ -66,12 +64,13 @@ var operationRolesCookieAuth = map[string][]string{
|
||||
CreateSubmissionOperation: []string{},
|
||||
CreateSubmissionAdminOperation: []string{},
|
||||
CreateSubmissionAuditCommentOperation: []string{},
|
||||
CreateSubmissionReviewOperation: []string{},
|
||||
DeleteScriptOperation: []string{},
|
||||
DeleteScriptPolicyOperation: []string{},
|
||||
DownloadMapAssetOperation: []string{},
|
||||
GetOperationOperation: []string{},
|
||||
ListSubmissionReviewsOperation: []string{},
|
||||
ReleaseSubmissionsOperation: []string{},
|
||||
SeedCombobulatorOperation: []string{},
|
||||
SessionRolesOperation: []string{},
|
||||
SessionUserOperation: []string{},
|
||||
SessionValidateOperation: []string{},
|
||||
@@ -82,27 +81,7 @@ var operationRolesCookieAuth = map[string][]string{
|
||||
UpdateScriptOperation: []string{},
|
||||
UpdateScriptPolicyOperation: []string{},
|
||||
UpdateSubmissionModelOperation: []string{},
|
||||
}
|
||||
|
||||
// GetRolesForCookieAuth returns the required roles for the given operation.
|
||||
//
|
||||
// This is useful for authorization scenarios where you need to know which roles
|
||||
// are required for an operation.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// requiredRoles := GetRolesForCookieAuth(AddPetOperation)
|
||||
//
|
||||
// Returns nil if the operation has no role requirements or if the operation is unknown.
|
||||
func GetRolesForCookieAuth(operation string) []string {
|
||||
roles, ok := operationRolesCookieAuth[operation]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
// Return a copy to prevent external modification
|
||||
result := make([]string, len(roles))
|
||||
copy(result, roles)
|
||||
return result
|
||||
UpdateSubmissionReviewOperation: []string{},
|
||||
}
|
||||
|
||||
func (s *Server) securityCookieAuth(ctx context.Context, operationName OperationName, req *http.Request) (context.Context, bool, error) {
|
||||
|
||||
@@ -173,12 +173,6 @@ type Handler interface {
|
||||
//
|
||||
// POST /usernames
|
||||
BatchUsernames(ctx context.Context, req *BatchUsernamesReq) (*BatchUsernamesOK, error)
|
||||
// Combobulate implements combobulate operation.
|
||||
//
|
||||
// Queue a map for combobulator processing.
|
||||
//
|
||||
// POST /maps/{MapID}/combobulate
|
||||
Combobulate(ctx context.Context, params CombobulateParams) error
|
||||
// CreateMapfix implements createMapfix operation.
|
||||
//
|
||||
// Trigger the validator to create a mapfix.
|
||||
@@ -221,6 +215,12 @@ type Handler interface {
|
||||
//
|
||||
// POST /submissions/{SubmissionID}/comment
|
||||
CreateSubmissionAuditComment(ctx context.Context, req CreateSubmissionAuditCommentReq, params CreateSubmissionAuditCommentParams) error
|
||||
// CreateSubmissionReview implements createSubmissionReview operation.
|
||||
//
|
||||
// Create a review for a submission.
|
||||
//
|
||||
// POST /submissions/{SubmissionID}/reviews
|
||||
CreateSubmissionReview(ctx context.Context, req *SubmissionReviewCreate, params CreateSubmissionReviewParams) (*SubmissionReview, error)
|
||||
// DeleteScript implements deleteScript operation.
|
||||
//
|
||||
// Delete the specified script by ID.
|
||||
@@ -239,6 +239,24 @@ type Handler interface {
|
||||
//
|
||||
// GET /maps/{MapID}/download
|
||||
DownloadMapAsset(ctx context.Context, params DownloadMapAssetParams) (DownloadMapAssetOK, error)
|
||||
// GetAOREvent implements getAOREvent operation.
|
||||
//
|
||||
// Get a specific AOR event.
|
||||
//
|
||||
// GET /aor-events/{AOREventID}
|
||||
GetAOREvent(ctx context.Context, params GetAOREventParams) (*AOREvent, error)
|
||||
// GetAOREventSubmissions implements getAOREventSubmissions operation.
|
||||
//
|
||||
// Get all submissions for a specific AOR event.
|
||||
//
|
||||
// GET /aor-events/{AOREventID}/submissions
|
||||
GetAOREventSubmissions(ctx context.Context, params GetAOREventSubmissionsParams) ([]Submission, error)
|
||||
// GetActiveAOREvent implements getActiveAOREvent operation.
|
||||
//
|
||||
// Get the currently active AOR event.
|
||||
//
|
||||
// GET /aor-events/active
|
||||
GetActiveAOREvent(ctx context.Context) (*AOREvent, error)
|
||||
// GetAssetThumbnail implements getAssetThumbnail operation.
|
||||
//
|
||||
// Get single asset thumbnail.
|
||||
@@ -293,6 +311,12 @@ type Handler interface {
|
||||
//
|
||||
// GET /thumbnails/user/{UserID}
|
||||
GetUserThumbnail(ctx context.Context, params GetUserThumbnailParams) (*GetUserThumbnailFound, error)
|
||||
// ListAOREvents implements listAOREvents operation.
|
||||
//
|
||||
// Get list of AOR events.
|
||||
//
|
||||
// GET /aor-events
|
||||
ListAOREvents(ctx context.Context, params ListAOREventsParams) ([]AOREvent, error)
|
||||
// ListMapfixAuditEvents implements listMapfixAuditEvents operation.
|
||||
//
|
||||
// Retrieve a list of audit events.
|
||||
@@ -329,6 +353,12 @@ type Handler interface {
|
||||
//
|
||||
// GET /submissions/{SubmissionID}/audit-events
|
||||
ListSubmissionAuditEvents(ctx context.Context, params ListSubmissionAuditEventsParams) ([]AuditEvent, error)
|
||||
// ListSubmissionReviews implements listSubmissionReviews operation.
|
||||
//
|
||||
// Get all reviews for a submission.
|
||||
//
|
||||
// GET /submissions/{SubmissionID}/reviews
|
||||
ListSubmissionReviews(ctx context.Context, params ListSubmissionReviewsParams) ([]SubmissionReview, error)
|
||||
// ListSubmissions implements listSubmissions operation.
|
||||
//
|
||||
// Get list of submissions.
|
||||
@@ -341,12 +371,6 @@ type Handler interface {
|
||||
//
|
||||
// POST /release-submissions
|
||||
ReleaseSubmissions(ctx context.Context, req []ReleaseInfo) (*OperationID, error)
|
||||
// SeedCombobulator implements seedCombobulator operation.
|
||||
//
|
||||
// Queue all maps for combobulator processing.
|
||||
//
|
||||
// POST /maps-admin/seed-combobulator
|
||||
SeedCombobulator(ctx context.Context) error
|
||||
// SessionRoles implements sessionRoles operation.
|
||||
//
|
||||
// Get list of roles for the current session.
|
||||
@@ -407,6 +431,12 @@ type Handler interface {
|
||||
//
|
||||
// POST /submissions/{SubmissionID}/model
|
||||
UpdateSubmissionModel(ctx context.Context, params UpdateSubmissionModelParams) error
|
||||
// UpdateSubmissionReview implements updateSubmissionReview operation.
|
||||
//
|
||||
// Update an existing review.
|
||||
//
|
||||
// PATCH /submissions/{SubmissionID}/reviews/{ReviewID}
|
||||
UpdateSubmissionReview(ctx context.Context, req *SubmissionReviewCreate, params UpdateSubmissionReviewParams) (*SubmissionReview, error)
|
||||
// NewError creates *ErrorStatusCode from error returned by handler.
|
||||
//
|
||||
// Used for common default response.
|
||||
|
||||
@@ -259,15 +259,6 @@ func (UnimplementedHandler) BatchUsernames(ctx context.Context, req *BatchUserna
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// Combobulate implements combobulate operation.
|
||||
//
|
||||
// Queue a map for combobulator processing.
|
||||
//
|
||||
// POST /maps/{MapID}/combobulate
|
||||
func (UnimplementedHandler) Combobulate(ctx context.Context, params CombobulateParams) error {
|
||||
return ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// CreateMapfix implements createMapfix operation.
|
||||
//
|
||||
// Trigger the validator to create a mapfix.
|
||||
@@ -331,6 +322,15 @@ func (UnimplementedHandler) CreateSubmissionAuditComment(ctx context.Context, re
|
||||
return ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// CreateSubmissionReview implements createSubmissionReview operation.
|
||||
//
|
||||
// Create a review for a submission.
|
||||
//
|
||||
// POST /submissions/{SubmissionID}/reviews
|
||||
func (UnimplementedHandler) CreateSubmissionReview(ctx context.Context, req *SubmissionReviewCreate, params CreateSubmissionReviewParams) (r *SubmissionReview, _ error) {
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// DeleteScript implements deleteScript operation.
|
||||
//
|
||||
// Delete the specified script by ID.
|
||||
@@ -358,6 +358,33 @@ func (UnimplementedHandler) DownloadMapAsset(ctx context.Context, params Downloa
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// GetAOREvent implements getAOREvent operation.
|
||||
//
|
||||
// Get a specific AOR event.
|
||||
//
|
||||
// GET /aor-events/{AOREventID}
|
||||
func (UnimplementedHandler) GetAOREvent(ctx context.Context, params GetAOREventParams) (r *AOREvent, _ error) {
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// GetAOREventSubmissions implements getAOREventSubmissions operation.
|
||||
//
|
||||
// Get all submissions for a specific AOR event.
|
||||
//
|
||||
// GET /aor-events/{AOREventID}/submissions
|
||||
func (UnimplementedHandler) GetAOREventSubmissions(ctx context.Context, params GetAOREventSubmissionsParams) (r []Submission, _ error) {
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// GetActiveAOREvent implements getActiveAOREvent operation.
|
||||
//
|
||||
// Get the currently active AOR event.
|
||||
//
|
||||
// GET /aor-events/active
|
||||
func (UnimplementedHandler) GetActiveAOREvent(ctx context.Context) (r *AOREvent, _ error) {
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// GetAssetThumbnail implements getAssetThumbnail operation.
|
||||
//
|
||||
// Get single asset thumbnail.
|
||||
@@ -439,6 +466,15 @@ func (UnimplementedHandler) GetUserThumbnail(ctx context.Context, params GetUser
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// ListAOREvents implements listAOREvents operation.
|
||||
//
|
||||
// Get list of AOR events.
|
||||
//
|
||||
// GET /aor-events
|
||||
func (UnimplementedHandler) ListAOREvents(ctx context.Context, params ListAOREventsParams) (r []AOREvent, _ error) {
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// ListMapfixAuditEvents implements listMapfixAuditEvents operation.
|
||||
//
|
||||
// Retrieve a list of audit events.
|
||||
@@ -493,6 +529,15 @@ func (UnimplementedHandler) ListSubmissionAuditEvents(ctx context.Context, param
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// ListSubmissionReviews implements listSubmissionReviews operation.
|
||||
//
|
||||
// Get all reviews for a submission.
|
||||
//
|
||||
// GET /submissions/{SubmissionID}/reviews
|
||||
func (UnimplementedHandler) ListSubmissionReviews(ctx context.Context, params ListSubmissionReviewsParams) (r []SubmissionReview, _ error) {
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// ListSubmissions implements listSubmissions operation.
|
||||
//
|
||||
// Get list of submissions.
|
||||
@@ -511,15 +556,6 @@ func (UnimplementedHandler) ReleaseSubmissions(ctx context.Context, req []Releas
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// SeedCombobulator implements seedCombobulator operation.
|
||||
//
|
||||
// Queue all maps for combobulator processing.
|
||||
//
|
||||
// POST /maps-admin/seed-combobulator
|
||||
func (UnimplementedHandler) SeedCombobulator(ctx context.Context) error {
|
||||
return ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// SessionRoles implements sessionRoles operation.
|
||||
//
|
||||
// Get list of roles for the current session.
|
||||
@@ -610,6 +646,15 @@ func (UnimplementedHandler) UpdateSubmissionModel(ctx context.Context, params Up
|
||||
return ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// UpdateSubmissionReview implements updateSubmissionReview operation.
|
||||
//
|
||||
// Update an existing review.
|
||||
//
|
||||
// PATCH /submissions/{SubmissionID}/reviews/{ReviewID}
|
||||
func (UnimplementedHandler) UpdateSubmissionReview(ctx context.Context, req *SubmissionReviewCreate, params UpdateSubmissionReviewParams) (r *SubmissionReview, _ error) {
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// NewError creates *ErrorStatusCode from error returned by handler.
|
||||
//
|
||||
// Used for common default response.
|
||||
|
||||
@@ -9,6 +9,39 @@ import (
|
||||
"github.com/ogen-go/ogen/validate"
|
||||
)
|
||||
|
||||
func (s *AOREvent) Validate() error {
|
||||
if s == nil {
|
||||
return validate.ErrNilPointer
|
||||
}
|
||||
|
||||
var failures []validate.FieldError
|
||||
if err := func() error {
|
||||
if err := (validate.Int{
|
||||
MinSet: true,
|
||||
Min: 0,
|
||||
MaxSet: true,
|
||||
Max: 5,
|
||||
MinExclusive: false,
|
||||
MaxExclusive: false,
|
||||
MultipleOfSet: false,
|
||||
MultipleOf: 0,
|
||||
Pattern: nil,
|
||||
}).Validate(int64(s.Status)); err != nil {
|
||||
return errors.Wrap(err, "int")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
failures = append(failures, validate.FieldError{
|
||||
Name: "Status",
|
||||
Error: err,
|
||||
})
|
||||
}
|
||||
if len(failures) > 0 {
|
||||
return &validate.Error{Fields: failures}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *AuditEvent) Validate() error {
|
||||
if s == nil {
|
||||
return validate.ErrNilPointer
|
||||
@@ -2311,6 +2344,181 @@ func (s *Submission) Validate() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *SubmissionReview) Validate() error {
|
||||
if s == nil {
|
||||
return validate.ErrNilPointer
|
||||
}
|
||||
|
||||
var failures []validate.FieldError
|
||||
if err := func() error {
|
||||
if err := (validate.Int{
|
||||
MinSet: true,
|
||||
Min: 0,
|
||||
MaxSet: false,
|
||||
Max: 0,
|
||||
MinExclusive: false,
|
||||
MaxExclusive: false,
|
||||
MultipleOfSet: false,
|
||||
MultipleOf: 0,
|
||||
Pattern: nil,
|
||||
}).Validate(int64(s.ID)); err != nil {
|
||||
return errors.Wrap(err, "int")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
failures = append(failures, validate.FieldError{
|
||||
Name: "ID",
|
||||
Error: err,
|
||||
})
|
||||
}
|
||||
if err := func() error {
|
||||
if err := (validate.Int{
|
||||
MinSet: true,
|
||||
Min: 0,
|
||||
MaxSet: false,
|
||||
Max: 0,
|
||||
MinExclusive: false,
|
||||
MaxExclusive: false,
|
||||
MultipleOfSet: false,
|
||||
MultipleOf: 0,
|
||||
Pattern: nil,
|
||||
}).Validate(int64(s.SubmissionID)); err != nil {
|
||||
return errors.Wrap(err, "int")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
failures = append(failures, validate.FieldError{
|
||||
Name: "SubmissionID",
|
||||
Error: err,
|
||||
})
|
||||
}
|
||||
if err := func() error {
|
||||
if err := (validate.Int{
|
||||
MinSet: true,
|
||||
Min: 0,
|
||||
MaxSet: false,
|
||||
Max: 0,
|
||||
MinExclusive: false,
|
||||
MaxExclusive: false,
|
||||
MultipleOfSet: false,
|
||||
MultipleOf: 0,
|
||||
Pattern: nil,
|
||||
}).Validate(int64(s.ReviewerID)); err != nil {
|
||||
return errors.Wrap(err, "int")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
failures = append(failures, validate.FieldError{
|
||||
Name: "ReviewerID",
|
||||
Error: err,
|
||||
})
|
||||
}
|
||||
if err := func() error {
|
||||
if err := (validate.String{
|
||||
MinLength: 0,
|
||||
MinLengthSet: false,
|
||||
MaxLength: 2048,
|
||||
MaxLengthSet: true,
|
||||
Email: false,
|
||||
Hostname: false,
|
||||
Regex: nil,
|
||||
MinNumeric: 0,
|
||||
MinNumericSet: false,
|
||||
MaxNumeric: 0,
|
||||
MaxNumericSet: false,
|
||||
}).Validate(string(s.Description)); err != nil {
|
||||
return errors.Wrap(err, "string")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
failures = append(failures, validate.FieldError{
|
||||
Name: "Description",
|
||||
Error: err,
|
||||
})
|
||||
}
|
||||
if err := func() error {
|
||||
if err := (validate.Int{
|
||||
MinSet: true,
|
||||
Min: 0,
|
||||
MaxSet: false,
|
||||
Max: 0,
|
||||
MinExclusive: false,
|
||||
MaxExclusive: false,
|
||||
MultipleOfSet: false,
|
||||
MultipleOf: 0,
|
||||
Pattern: nil,
|
||||
}).Validate(int64(s.CreatedAt)); err != nil {
|
||||
return errors.Wrap(err, "int")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
failures = append(failures, validate.FieldError{
|
||||
Name: "CreatedAt",
|
||||
Error: err,
|
||||
})
|
||||
}
|
||||
if err := func() error {
|
||||
if err := (validate.Int{
|
||||
MinSet: true,
|
||||
Min: 0,
|
||||
MaxSet: false,
|
||||
Max: 0,
|
||||
MinExclusive: false,
|
||||
MaxExclusive: false,
|
||||
MultipleOfSet: false,
|
||||
MultipleOf: 0,
|
||||
Pattern: nil,
|
||||
}).Validate(int64(s.UpdatedAt)); err != nil {
|
||||
return errors.Wrap(err, "int")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
failures = append(failures, validate.FieldError{
|
||||
Name: "UpdatedAt",
|
||||
Error: err,
|
||||
})
|
||||
}
|
||||
if len(failures) > 0 {
|
||||
return &validate.Error{Fields: failures}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *SubmissionReviewCreate) Validate() error {
|
||||
if s == nil {
|
||||
return validate.ErrNilPointer
|
||||
}
|
||||
|
||||
var failures []validate.FieldError
|
||||
if err := func() error {
|
||||
if err := (validate.String{
|
||||
MinLength: 0,
|
||||
MinLengthSet: false,
|
||||
MaxLength: 2048,
|
||||
MaxLengthSet: true,
|
||||
Email: false,
|
||||
Hostname: false,
|
||||
Regex: nil,
|
||||
MinNumeric: 0,
|
||||
MinNumericSet: false,
|
||||
MaxNumeric: 0,
|
||||
MaxNumericSet: false,
|
||||
}).Validate(string(s.Description)); err != nil {
|
||||
return errors.Wrap(err, "string")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
failures = append(failures, validate.FieldError{
|
||||
Name: "Description",
|
||||
Error: err,
|
||||
})
|
||||
}
|
||||
if len(failures) > 0 {
|
||||
return &validate.Error{Fields: failures}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *SubmissionTriggerCreate) Validate() error {
|
||||
if s == nil {
|
||||
return validate.ErrNilPointer
|
||||
|
||||
75
pkg/cmds/aor.go
Normal file
75
pkg/cmds/aor.go
Normal file
@@ -0,0 +1,75 @@
|
||||
package cmds
|
||||
|
||||
import (
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/datastore/gormstore"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/service"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
func NewAORCommand() *cli.Command {
|
||||
return &cli.Command{
|
||||
Name: "aor",
|
||||
Usage: "Run AOR (Accept or Reject) event processor",
|
||||
Action: runAORProcessor,
|
||||
Flags: []cli.Flag{
|
||||
&cli.StringFlag{
|
||||
Name: "pg-host",
|
||||
Usage: "Host of postgres database",
|
||||
EnvVars: []string{"PG_HOST"},
|
||||
Required: true,
|
||||
},
|
||||
&cli.IntFlag{
|
||||
Name: "pg-port",
|
||||
Usage: "Port of postgres database",
|
||||
EnvVars: []string{"PG_PORT"},
|
||||
Required: true,
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "pg-db",
|
||||
Usage: "Name of database to connect to",
|
||||
EnvVars: []string{"PG_DB"},
|
||||
Required: true,
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "pg-user",
|
||||
Usage: "User to connect with",
|
||||
EnvVars: []string{"PG_USER"},
|
||||
Required: true,
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "pg-password",
|
||||
Usage: "Password to connect with",
|
||||
EnvVars: []string{"PG_PASSWORD"},
|
||||
Required: true,
|
||||
},
|
||||
&cli.BoolFlag{
|
||||
Name: "migrate",
|
||||
Usage: "Run database migrations",
|
||||
Value: false,
|
||||
EnvVars: []string{"MIGRATE"},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func runAORProcessor(ctx *cli.Context) error {
|
||||
log.Info("Starting AOR event processor")
|
||||
|
||||
// Connect to database
|
||||
db, err := gormstore.New(ctx)
|
||||
if err != nil {
|
||||
log.WithError(err).Fatal("failed to connect database")
|
||||
return err
|
||||
}
|
||||
|
||||
// Create scheduler and process events
|
||||
scheduler := service.NewAORScheduler(db)
|
||||
if err := scheduler.ProcessAOREvents(); err != nil {
|
||||
log.WithError(err).Error("AOR event processing failed")
|
||||
return err
|
||||
}
|
||||
|
||||
log.Info("AOR event processor completed successfully")
|
||||
return nil
|
||||
}
|
||||
@@ -8,8 +8,6 @@ import (
|
||||
"git.itzana.me/strafesnet/go-grpc/auth"
|
||||
"git.itzana.me/strafesnet/go-grpc/maps"
|
||||
"git.itzana.me/strafesnet/go-grpc/maps_extended"
|
||||
"git.itzana.me/strafesnet/go-grpc/mapfixes"
|
||||
"git.itzana.me/strafesnet/go-grpc/submissions"
|
||||
"git.itzana.me/strafesnet/go-grpc/users"
|
||||
"git.itzana.me/strafesnet/go-grpc/validator"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/api"
|
||||
@@ -19,8 +17,6 @@ import (
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/service"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/validator_controller"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/web_api"
|
||||
awsconfig "github.com/aws/aws-sdk-go-v2/config"
|
||||
"github.com/aws/aws-sdk-go-v2/service/s3"
|
||||
"github.com/nats-io/nats.go"
|
||||
"github.com/redis/go-redis/v9"
|
||||
log "github.com/sirupsen/logrus"
|
||||
@@ -125,12 +121,6 @@ func NewServeCommand() *cli.Command {
|
||||
EnvVars: []string{"REDIS_DB"},
|
||||
Value: 0,
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "s3-bucket",
|
||||
Usage: "S3 bucket for map assets",
|
||||
EnvVars: []string{"S3_BUCKET"},
|
||||
Required: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -152,7 +142,7 @@ func serve(ctx *cli.Context) error {
|
||||
_, err = js.AddStream(&nats.StreamConfig{
|
||||
Name: "maptest",
|
||||
Subjects: []string{"maptest.>"},
|
||||
Retention: nats.InterestPolicy,
|
||||
Retention: nats.WorkQueuePolicy,
|
||||
})
|
||||
if err != nil {
|
||||
log.WithError(err).Fatal("failed to add stream")
|
||||
@@ -176,13 +166,6 @@ func serve(ctx *cli.Context) error {
|
||||
ApiKey: ctx.String("rbx-api-key"),
|
||||
}
|
||||
|
||||
// Initialize S3 client
|
||||
awsCfg, err := awsconfig.LoadDefaultConfig(ctx.Context)
|
||||
if err != nil {
|
||||
log.WithError(err).Fatal("failed to load AWS config")
|
||||
}
|
||||
s3Client := s3.NewFromConfig(awsCfg)
|
||||
|
||||
// connect to main game database
|
||||
conn, err := grpc.Dial(ctx.String("data-rpc-host"), grpc.WithTransportCredentials(insecure.NewCredentials()))
|
||||
if err != nil {
|
||||
@@ -195,8 +178,6 @@ func serve(ctx *cli.Context) error {
|
||||
users.NewUsersServiceClient(conn),
|
||||
robloxClient,
|
||||
redisClient,
|
||||
s3Client,
|
||||
ctx.String("s3-bucket"),
|
||||
)
|
||||
|
||||
svc_external := web_api.NewService(
|
||||
@@ -223,11 +204,7 @@ func serve(ctx *cli.Context) error {
|
||||
grpcServer := grpc.NewServer()
|
||||
|
||||
maps_controller := controller.NewMapsController(&svc_inner)
|
||||
mapfixes_controller := controller.NewMapfixesController(&svc_inner)
|
||||
submissions_controller := controller.NewSubmissionsController(&svc_inner)
|
||||
maps_extended.RegisterMapsServiceServer(grpcServer,&maps_controller)
|
||||
mapfixes.RegisterMapfixesServiceServer(grpcServer,&mapfixes_controller)
|
||||
submissions.RegisterSubmissionsServiceServer(grpcServer,&submissions_controller)
|
||||
|
||||
mapfix_controller := validator_controller.NewMapfixesController(&svc_inner)
|
||||
operation_controller := validator_controller.NewOperationsController(&svc_inner)
|
||||
|
||||
@@ -1,149 +0,0 @@
|
||||
package controller
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"git.itzana.me/strafesnet/go-grpc/mapfixes"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/model"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/service"
|
||||
)
|
||||
|
||||
// Mapfixes is the gRPC controller for the mapfixes service. It embeds the
// generated UnimplementedMapfixesServiceServer for forward compatibility and
// delegates all business logic to the inner service.
type Mapfixes struct {
	*mapfixes.UnimplementedMapfixesServiceServer
	inner *service.Service
}

// NewMapfixesController builds a Mapfixes controller around the given
// service. Note it returns a value, not a pointer; callers register the
// address of the returned value with the gRPC server.
func NewMapfixesController(
	inner *service.Service,
) Mapfixes {
	return Mapfixes{
		inner: inner,
	}
}
|
||||
|
||||
func (svc *Mapfixes) Get(ctx context.Context, request *mapfixes.MapfixId) (*mapfixes.MapfixResponse, error) {
|
||||
item, err := svc.inner.GetMapfix(ctx, request.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var validated_asset_id *uint64
|
||||
if item.ValidatedAssetID != 0 {
|
||||
validated_asset_id = &item.ValidatedAssetID
|
||||
}
|
||||
var validated_asset_version *uint64
|
||||
if item.ValidatedAssetVersion != 0 {
|
||||
validated_asset_version = &item.ValidatedAssetVersion
|
||||
}
|
||||
return &mapfixes.MapfixResponse{
|
||||
ID: item.ID,
|
||||
DisplayName: item.DisplayName,
|
||||
Creator: item.Creator,
|
||||
GameID: uint32(item.GameID),
|
||||
CreatedAt: item.CreatedAt.Unix(),
|
||||
UpdatedAt: item.UpdatedAt.Unix(),
|
||||
Submitter: uint64(item.Submitter),
|
||||
AssetVersion: uint64(item.AssetVersion),
|
||||
AssetID: item.AssetID,
|
||||
ValidatedAssetID: validated_asset_id,
|
||||
ValidatedAssetVersion: validated_asset_version,
|
||||
TargetAssetID: item.TargetAssetID,
|
||||
StatusID: mapfixes.MapfixStatus(item.StatusID),
|
||||
}, nil
|
||||
}
|
||||
func (svc *Mapfixes) GetList(ctx context.Context, request *mapfixes.MapfixIdList) (*mapfixes.MapfixList, error) {
|
||||
items, err := svc.inner.GetMapfixList(ctx, request.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
resp := mapfixes.MapfixList{}
|
||||
resp.Mapfixes = make([]*mapfixes.MapfixResponse, len(items))
|
||||
for i, item := range items {
|
||||
var validated_asset_id *uint64
|
||||
if item.ValidatedAssetID != 0 {
|
||||
validated_asset_id = &item.ValidatedAssetID
|
||||
}
|
||||
var validated_asset_version *uint64
|
||||
if item.ValidatedAssetVersion != 0 {
|
||||
validated_asset_version = &item.ValidatedAssetVersion
|
||||
}
|
||||
resp.Mapfixes[i] = &mapfixes.MapfixResponse{
|
||||
ID: item.ID,
|
||||
DisplayName: item.DisplayName,
|
||||
Creator: item.Creator,
|
||||
GameID: uint32(item.GameID),
|
||||
CreatedAt: item.CreatedAt.Unix(),
|
||||
UpdatedAt: item.UpdatedAt.Unix(),
|
||||
Submitter: uint64(item.Submitter),
|
||||
AssetVersion: uint64(item.AssetVersion),
|
||||
AssetID: item.AssetID,
|
||||
ValidatedAssetID: validated_asset_id,
|
||||
ValidatedAssetVersion: validated_asset_version,
|
||||
TargetAssetID: item.TargetAssetID,
|
||||
StatusID: mapfixes.MapfixStatus(item.StatusID),
|
||||
}
|
||||
}
|
||||
|
||||
return &resp, nil
|
||||
}
|
||||
func (svc *Mapfixes) List(ctx context.Context, request *mapfixes.ListRequest) (*mapfixes.MapfixList, error) {
|
||||
if request.Page == nil {
|
||||
return nil, PageError
|
||||
}
|
||||
|
||||
filter := service.NewMapfixFilter()
|
||||
if request.Filter != nil {
|
||||
if request.Filter.DisplayName != nil {
|
||||
filter.SetDisplayName(*request.Filter.DisplayName)
|
||||
}
|
||||
if request.Filter.Creator != nil {
|
||||
filter.SetCreator(*request.Filter.Creator)
|
||||
}
|
||||
if request.Filter.GameID != nil {
|
||||
filter.SetGameID(*request.Filter.GameID)
|
||||
}
|
||||
if request.Filter.Submitter != nil {
|
||||
filter.SetSubmitter(*request.Filter.Submitter)
|
||||
}
|
||||
}
|
||||
|
||||
items, err := svc.inner.ListMapfixes(ctx, filter, model.Page{
|
||||
Number: int32(request.Page.Number),
|
||||
Size: int32(request.Page.Size),
|
||||
}, datastore.ListSortDateDescending)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
resp := mapfixes.MapfixList{}
|
||||
resp.Mapfixes = make([]*mapfixes.MapfixResponse, len(items))
|
||||
for i, item := range items {
|
||||
var validated_asset_id *uint64
|
||||
if item.ValidatedAssetID != 0 {
|
||||
validated_asset_id = &item.ValidatedAssetID
|
||||
}
|
||||
var validated_asset_version *uint64
|
||||
if item.ValidatedAssetVersion != 0 {
|
||||
validated_asset_version = &item.ValidatedAssetVersion
|
||||
}
|
||||
resp.Mapfixes[i] = &mapfixes.MapfixResponse{
|
||||
ID: item.ID,
|
||||
DisplayName: item.DisplayName,
|
||||
Creator: item.Creator,
|
||||
GameID: uint32(item.GameID),
|
||||
CreatedAt: item.CreatedAt.Unix(),
|
||||
UpdatedAt: item.UpdatedAt.Unix(),
|
||||
Submitter: uint64(item.Submitter),
|
||||
AssetVersion: uint64(item.AssetVersion),
|
||||
AssetID: item.AssetID,
|
||||
ValidatedAssetID: validated_asset_id,
|
||||
ValidatedAssetVersion: validated_asset_version,
|
||||
TargetAssetID: item.TargetAssetID,
|
||||
StatusID: mapfixes.MapfixStatus(item.StatusID),
|
||||
}
|
||||
}
|
||||
|
||||
return &resp, nil
|
||||
}
|
||||
@@ -195,13 +195,3 @@ func (svc *Maps) IncrementLoadCount(ctx context.Context, request *maps_extended.
|
||||
}
|
||||
return &maps_extended.NullResponse{}, nil
|
||||
}
|
||||
|
||||
func (svc *Maps) GetSnfmDownloadUrl(ctx context.Context, request *maps_extended.MapId) (*maps_extended.SnfmDownloadUrl, error) {
|
||||
url, err := svc.inner.GetSnfmDownloadUrl(ctx, request.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &maps_extended.SnfmDownloadUrl{
|
||||
Url: url,
|
||||
}, nil
|
||||
}
|
||||
|
||||
@@ -1,161 +0,0 @@
|
||||
package controller
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"git.itzana.me/strafesnet/go-grpc/submissions"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/model"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/service"
|
||||
)
|
||||
|
||||
// Submissions is the gRPC controller for the submissions service. It embeds
// the generated UnimplementedSubmissionsServiceServer for forward
// compatibility and delegates all business logic to the inner service.
type Submissions struct {
	*submissions.UnimplementedSubmissionsServiceServer
	inner *service.Service
}

// NewSubmissionsController builds a Submissions controller around the given
// service. It returns a value; callers register the address of the returned
// value with the gRPC server.
func NewSubmissionsController(
	inner *service.Service,
) Submissions {
	return Submissions{
		inner: inner,
	}
}
|
||||
|
||||
// Get looks up a single submission by ID and converts it to its gRPC
// response form. Zero-valued optional asset columns (validated asset
// ID/version, uploaded asset ID) are mapped to nil so callers can tell
// "unset" apart from a real value.
func (svc *Submissions) Get(ctx context.Context, request *submissions.SubmissionId) (*submissions.SubmissionResponse, error) {
	item, err := svc.inner.GetSubmission(ctx, request.ID)
	if err != nil {
		return nil, err
	}

	// Zero is the "not set" sentinel for these columns; expose as nil.
	var validated_asset_id *uint64
	if item.ValidatedAssetID != 0 {
		validated_asset_id = &item.ValidatedAssetID
	}
	var validated_asset_version *uint64
	if item.ValidatedAssetVersion != 0 {
		validated_asset_version = &item.ValidatedAssetVersion
	}
	var uploaded_asset_id *uint64
	if item.UploadedAssetID != 0 {
		uploaded_asset_id = &item.UploadedAssetID
	}
	return &submissions.SubmissionResponse{
		ID:                    item.ID,
		DisplayName:           item.DisplayName,
		Creator:               item.Creator,
		GameID:                uint32(item.GameID),
		CreatedAt:             item.CreatedAt.Unix(),
		UpdatedAt:             item.UpdatedAt.Unix(),
		Submitter:             uint64(item.Submitter),
		AssetVersion:          uint64(item.AssetVersion),
		AssetID:               item.AssetID,
		ValidatedAssetID:      validated_asset_id,
		ValidatedAssetVersion: validated_asset_version,
		UploadedAssetID:       uploaded_asset_id,
		StatusID:              submissions.SubmissionStatus(item.StatusID),
	}, nil
}
|
||||
// GetList resolves a batch of submission IDs into their gRPC response
// forms. Output order matches the order returned by the inner service.
// Zero-valued optional asset columns are mapped to nil (see Get).
func (svc *Submissions) GetList(ctx context.Context, request *submissions.SubmissionIdList) (*submissions.SubmissionList, error) {
	items, err := svc.inner.GetSubmissionList(ctx, request.ID)
	if err != nil {
		return nil, err
	}

	resp := submissions.SubmissionList{}
	resp.Submissions = make([]*submissions.SubmissionResponse, len(items))
	for i, item := range items {
		// Zero is the "not set" sentinel for these columns; expose as nil.
		var validated_asset_id *uint64
		if item.ValidatedAssetID != 0 {
			validated_asset_id = &item.ValidatedAssetID
		}
		var validated_asset_version *uint64
		if item.ValidatedAssetVersion != 0 {
			validated_asset_version = &item.ValidatedAssetVersion
		}
		var uploaded_asset_id *uint64
		if item.UploadedAssetID != 0 {
			uploaded_asset_id = &item.UploadedAssetID
		}
		resp.Submissions[i] = &submissions.SubmissionResponse{
			ID:                    item.ID,
			DisplayName:           item.DisplayName,
			Creator:               item.Creator,
			GameID:                uint32(item.GameID),
			CreatedAt:             item.CreatedAt.Unix(),
			UpdatedAt:             item.UpdatedAt.Unix(),
			Submitter:             uint64(item.Submitter),
			AssetVersion:          uint64(item.AssetVersion),
			AssetID:               item.AssetID,
			ValidatedAssetID:      validated_asset_id,
			ValidatedAssetVersion: validated_asset_version,
			UploadedAssetID:       uploaded_asset_id,
			StatusID:              submissions.SubmissionStatus(item.StatusID),
		}
	}

	return &resp, nil
}
|
||||
// List returns one page of submissions matching the optional filter,
// newest first. The page descriptor is mandatory; its absence is a
// PageError. Zero-valued optional asset columns are mapped to nil
// (see Get).
func (svc *Submissions) List(ctx context.Context, request *submissions.ListRequest) (*submissions.SubmissionList, error) {
	if request.Page == nil {
		return nil, PageError
	}

	// Translate only the filter fields the caller actually supplied.
	filter := service.NewSubmissionFilter()
	if request.Filter != nil {
		if request.Filter.DisplayName != nil {
			filter.SetDisplayName(*request.Filter.DisplayName)
		}
		if request.Filter.Creator != nil {
			filter.SetCreator(*request.Filter.Creator)
		}
		if request.Filter.GameID != nil {
			filter.SetGameID(*request.Filter.GameID)
		}
		if request.Filter.Submitter != nil {
			filter.SetSubmitter(*request.Filter.Submitter)
		}
	}

	items, err := svc.inner.ListSubmissions(ctx, filter, model.Page{
		Number: int32(request.Page.Number),
		Size:   int32(request.Page.Size),
	}, datastore.ListSortDateDescending)
	if err != nil {
		return nil, err
	}

	resp := submissions.SubmissionList{}
	resp.Submissions = make([]*submissions.SubmissionResponse, len(items))
	for i, item := range items {
		// Zero is the "not set" sentinel for these columns; expose as nil.
		var validated_asset_id *uint64
		if item.ValidatedAssetID != 0 {
			validated_asset_id = &item.ValidatedAssetID
		}
		var validated_asset_version *uint64
		if item.ValidatedAssetVersion != 0 {
			validated_asset_version = &item.ValidatedAssetVersion
		}
		var uploaded_asset_id *uint64
		if item.UploadedAssetID != 0 {
			uploaded_asset_id = &item.UploadedAssetID
		}
		resp.Submissions[i] = &submissions.SubmissionResponse{
			ID:                    item.ID,
			DisplayName:           item.DisplayName,
			Creator:               item.Creator,
			GameID:                uint32(item.GameID),
			CreatedAt:             item.CreatedAt.Unix(),
			UpdatedAt:             item.UpdatedAt.Unix(),
			Submitter:             uint64(item.Submitter),
			AssetVersion:          uint64(item.AssetVersion),
			AssetID:               item.AssetID,
			ValidatedAssetID:      validated_asset_id,
			ValidatedAssetVersion: validated_asset_version,
			UploadedAssetID:       uploaded_asset_id,
			StatusID:              submissions.SubmissionStatus(item.StatusID),
		}
	}

	return &resp, nil
}
|
||||
@@ -24,11 +24,14 @@ const (
|
||||
)
|
||||
|
||||
// Datastore is the aggregate persistence interface. Each method returns a
// repository scoped to a single entity/table; implementations (e.g. the
// gorm-backed store) share one underlying connection across repositories.
type Datastore interface {
	AOREvents() AOREvents
	AORSubmissions() AORSubmissions
	AuditEvents() AuditEvents
	Maps() Maps
	Mapfixes() Mapfixes
	Operations() Operations
	Submissions() Submissions
	SubmissionReviews() SubmissionReviews
	Scripts() Scripts
	ScriptPolicy() ScriptPolicy
}
|
||||
@@ -47,7 +50,6 @@ type Maps interface {
|
||||
Create(ctx context.Context, smap model.Map) (model.Map, error)
|
||||
Update(ctx context.Context, id int64, values OptionalMap) error
|
||||
Delete(ctx context.Context, id int64) error
|
||||
GetAll(ctx context.Context) ([]model.Map, error)
|
||||
List(ctx context.Context, filters OptionalMap, page model.Page) ([]model.Map, error)
|
||||
IncrementLoadCount(ctx context.Context, id int64) error
|
||||
}
|
||||
@@ -84,6 +86,16 @@ type Submissions interface {
|
||||
ListWithTotal(ctx context.Context, filters OptionalMap, page model.Page, sort ListSort) (int64, []model.Submission, error)
|
||||
}
|
||||
|
||||
// SubmissionReviews is the repository for per-reviewer reviews of
// submissions. Implementations translate missing rows into
// datastore.ErrNotExist.
type SubmissionReviews interface {
	// Get fetches a review by primary key.
	Get(ctx context.Context, id int64) (model.SubmissionReview, error)
	// GetBySubmissionAndReviewer fetches the one review a given reviewer
	// wrote for a given submission.
	GetBySubmissionAndReviewer(ctx context.Context, submissionID int64, reviewerID uint64) (model.SubmissionReview, error)
	Create(ctx context.Context, review model.SubmissionReview) (model.SubmissionReview, error)
	// Update applies the column/value pairs in values to the review with id.
	Update(ctx context.Context, id int64, values OptionalMap) error
	Delete(ctx context.Context, id int64) error
	// ListBySubmission returns all reviews for one submission.
	ListBySubmission(ctx context.Context, submissionID int64) ([]model.SubmissionReview, error)
	// MarkOutdatedBySubmission flags every review of the submission as
	// outdated (e.g. after the submission's asset changes).
	MarkOutdatedBySubmission(ctx context.Context, submissionID int64) error
}
|
||||
|
||||
type Scripts interface {
|
||||
Get(ctx context.Context, id int64) (model.Script, error)
|
||||
Create(ctx context.Context, smap model.Script) (model.Script, error)
|
||||
@@ -100,3 +112,22 @@ type ScriptPolicy interface {
|
||||
Delete(ctx context.Context, id int64) error
|
||||
List(ctx context.Context, filters OptionalMap, page model.Page) ([]model.ScriptPolicy, error)
|
||||
}
|
||||
|
||||
// AOREvents is the repository for AOR (Accept or Reject) event cycles.
// Implementations translate missing rows into datastore.ErrNotExist.
type AOREvents interface {
	// Get fetches an event by primary key.
	Get(ctx context.Context, id int64) (model.AOREvent, error)
	// GetActive returns the current (non-closed) event, if any.
	GetActive(ctx context.Context) (model.AOREvent, error)
	// GetByStatus returns all events currently in the given status.
	GetByStatus(ctx context.Context, status model.AOREventStatus) ([]model.AOREvent, error)
	Create(ctx context.Context, event model.AOREvent) (model.AOREvent, error)
	// Update applies the column/value pairs in values to the event with id.
	Update(ctx context.Context, id int64, values OptionalMap) error
	Delete(ctx context.Context, id int64) error
	List(ctx context.Context, filters OptionalMap, page model.Page) ([]model.AOREvent, error)
}
|
||||
|
||||
// AORSubmissions is the repository for the links between AOR events and the
// submissions entered into them (a join table).
type AORSubmissions interface {
	// Get fetches a link row by primary key.
	Get(ctx context.Context, id int64) (model.AORSubmission, error)
	// GetByAOREvent returns all link rows for one event.
	GetByAOREvent(ctx context.Context, eventID int64) ([]model.AORSubmission, error)
	// GetBySubmission returns all link rows for one submission.
	GetBySubmission(ctx context.Context, submissionID int64) ([]model.AORSubmission, error)
	Create(ctx context.Context, aorSubmission model.AORSubmission) (model.AORSubmission, error)
	Delete(ctx context.Context, id int64) error
	// ListWithSubmissions joins through the link table and returns the
	// actual submissions attached to the event.
	ListWithSubmissions(ctx context.Context, eventID int64) ([]model.Submission, error)
}
|
||||
|
||||
@@ -23,14 +23,6 @@ func (q OptionalMap) AddNotNil(column string, value interface{}) OptionalMap {
|
||||
return q
|
||||
}
|
||||
|
||||
func (q OptionalMap) Pop(column string) (interface{}, bool) {
|
||||
value, ok := q.filter[column]
|
||||
if ok {
|
||||
delete(q.filter, column)
|
||||
}
|
||||
return value, ok
|
||||
}
|
||||
|
||||
// Map exposes the underlying column→value filter map. This is the live
// internal map, not a copy: mutations such as Pop are visible through the
// returned reference.
func (q OptionalMap) Map() map[string]interface{} {
	return q.filter
}
|
||||
|
||||
89
pkg/datastore/gormstore/aor_events.go
Normal file
89
pkg/datastore/gormstore/aor_events.go
Normal file
@@ -0,0 +1,89 @@
|
||||
package gormstore
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/model"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
// AOREvents is the gorm-backed implementation of datastore.AOREvents.
type AOREvents struct {
	db *gorm.DB
}
|
||||
|
||||
func (env *AOREvents) Get(ctx context.Context, id int64) (model.AOREvent, error) {
|
||||
var event model.AOREvent
|
||||
if err := env.db.First(&event, id).Error; err != nil {
|
||||
if errors.Is(err, gorm.ErrRecordNotFound) {
|
||||
return event, datastore.ErrNotExist
|
||||
}
|
||||
return event, err
|
||||
}
|
||||
return event, nil
|
||||
}
|
||||
|
||||
func (env *AOREvents) GetActive(ctx context.Context) (model.AOREvent, error) {
|
||||
var event model.AOREvent
|
||||
// Get the most recent non-closed event
|
||||
if err := env.db.Where("status != ?", model.AOREventStatusClosed).
|
||||
Order("start_date DESC").
|
||||
First(&event).Error; err != nil {
|
||||
if errors.Is(err, gorm.ErrRecordNotFound) {
|
||||
return event, datastore.ErrNotExist
|
||||
}
|
||||
return event, err
|
||||
}
|
||||
return event, nil
|
||||
}
|
||||
|
||||
func (env *AOREvents) GetByStatus(ctx context.Context, status model.AOREventStatus) ([]model.AOREvent, error) {
|
||||
var events []model.AOREvent
|
||||
if err := env.db.Where("status = ?", status).Order("start_date DESC").Find(&events).Error; err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return events, nil
|
||||
}
|
||||
|
||||
func (env *AOREvents) Create(ctx context.Context, event model.AOREvent) (model.AOREvent, error) {
|
||||
if err := env.db.Create(&event).Error; err != nil {
|
||||
return event, err
|
||||
}
|
||||
return event, nil
|
||||
}
|
||||
|
||||
func (env *AOREvents) Update(ctx context.Context, id int64, values datastore.OptionalMap) error {
|
||||
if err := env.db.Model(&model.AOREvent{}).Where("id = ?", id).Updates(values.Map()).Error; err != nil {
|
||||
if err == gorm.ErrRecordNotFound {
|
||||
return datastore.ErrNotExist
|
||||
}
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (env *AOREvents) Delete(ctx context.Context, id int64) error {
|
||||
if err := env.db.Delete(&model.AOREvent{}, id).Error; err != nil {
|
||||
if err == gorm.ErrRecordNotFound {
|
||||
return datastore.ErrNotExist
|
||||
}
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (env *AOREvents) List(ctx context.Context, filters datastore.OptionalMap, page model.Page) ([]model.AOREvent, error) {
|
||||
var events []model.AOREvent
|
||||
query := env.db.Where(filters.Map())
|
||||
|
||||
if page.Size > 0 {
|
||||
offset := (page.Number - 1) * page.Size
|
||||
query = query.Limit(int(page.Size)).Offset(int(offset))
|
||||
}
|
||||
|
||||
if err := query.Order("start_date DESC").Find(&events).Error; err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return events, nil
|
||||
}
|
||||
70
pkg/datastore/gormstore/aor_submissions.go
Normal file
70
pkg/datastore/gormstore/aor_submissions.go
Normal file
@@ -0,0 +1,70 @@
|
||||
package gormstore
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/model"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
// AORSubmissions is the gorm-backed implementation of
// datastore.AORSubmissions (the AOR-event/submission join table).
type AORSubmissions struct {
	db *gorm.DB
}
|
||||
|
||||
// Get fetches a single AOR-submission link row by primary key, translating
// gorm's record-not-found error into the datastore sentinel.
func (env *AORSubmissions) Get(ctx context.Context, id int64) (model.AORSubmission, error) {
	var aorSubmission model.AORSubmission
	if err := env.db.First(&aorSubmission, id).Error; err != nil {
		if errors.Is(err, gorm.ErrRecordNotFound) {
			return aorSubmission, datastore.ErrNotExist
		}
		return aorSubmission, err
	}
	return aorSubmission, nil
}
|
||||
|
||||
func (env *AORSubmissions) GetByAOREvent(ctx context.Context, eventID int64) ([]model.AORSubmission, error) {
|
||||
var aorSubmissions []model.AORSubmission
|
||||
if err := env.db.Where("aor_event_id = ?", eventID).Order("added_at DESC").Find(&aorSubmissions).Error; err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return aorSubmissions, nil
|
||||
}
|
||||
|
||||
func (env *AORSubmissions) GetBySubmission(ctx context.Context, submissionID int64) ([]model.AORSubmission, error) {
|
||||
var aorSubmissions []model.AORSubmission
|
||||
if err := env.db.Where("submission_id = ?", submissionID).Order("added_at DESC").Find(&aorSubmissions).Error; err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return aorSubmissions, nil
|
||||
}
|
||||
|
||||
// Create inserts a new AOR-submission link row and returns it with any
// database-populated fields (e.g. the generated ID) filled in.
func (env *AORSubmissions) Create(ctx context.Context, aorSubmission model.AORSubmission) (model.AORSubmission, error) {
	if err := env.db.Create(&aorSubmission).Error; err != nil {
		return aorSubmission, err
	}
	return aorSubmission, nil
}
|
||||
|
||||
func (env *AORSubmissions) Delete(ctx context.Context, id int64) error {
|
||||
if err := env.db.Delete(&model.AORSubmission{}, id).Error; err != nil {
|
||||
if err == gorm.ErrRecordNotFound {
|
||||
return datastore.ErrNotExist
|
||||
}
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ListWithSubmissions joins through the aor_submissions link table and
// returns the actual submission rows entered into the given event, ordered
// by when they were added (newest first).
func (env *AORSubmissions) ListWithSubmissions(ctx context.Context, eventID int64) ([]model.Submission, error) {
	var submissions []model.Submission
	if err := env.db.
		Joins("JOIN aor_submissions ON aor_submissions.submission_id = submissions.id").
		Where("aor_submissions.aor_event_id = ?", eventID).
		Order("aor_submissions.added_at DESC").
		Find(&submissions).Error; err != nil {
		return nil, err
	}
	return submissions, nil
}
|
||||
@@ -31,11 +31,14 @@ func New(ctx *cli.Context) (datastore.Datastore, error) {
|
||||
|
||||
if ctx.Bool("migrate") {
|
||||
if err := db.AutoMigrate(
|
||||
&model.AOREvent{},
|
||||
&model.AORSubmission{},
|
||||
&model.AuditEvent{},
|
||||
&model.Map{},
|
||||
&model.Mapfix{},
|
||||
&model.Operation{},
|
||||
&model.Submission{},
|
||||
&model.SubmissionReview{},
|
||||
&model.Script{},
|
||||
&model.ScriptPolicy{},
|
||||
); err != nil {
|
||||
|
||||
@@ -9,6 +9,14 @@ type Gormstore struct {
|
||||
db *gorm.DB
|
||||
}
|
||||
|
||||
func (g Gormstore) AOREvents() datastore.AOREvents {
|
||||
return &AOREvents{db: g.db}
|
||||
}
|
||||
|
||||
func (g Gormstore) AORSubmissions() datastore.AORSubmissions {
|
||||
return &AORSubmissions{db: g.db}
|
||||
}
|
||||
|
||||
func (g Gormstore) AuditEvents() datastore.AuditEvents {
|
||||
return &AuditEvents{db: g.db}
|
||||
}
|
||||
@@ -29,6 +37,10 @@ func (g Gormstore) Submissions() datastore.Submissions {
|
||||
return &Submissions{db: g.db}
|
||||
}
|
||||
|
||||
func (g Gormstore) SubmissionReviews() datastore.SubmissionReviews {
|
||||
return &SubmissionReviews{db: g.db}
|
||||
}
|
||||
|
||||
func (g Gormstore) Scripts() datastore.Scripts {
|
||||
return &Scripts{db: g.db}
|
||||
}
|
||||
|
||||
@@ -74,21 +74,9 @@ func (env *Maps) Delete(ctx context.Context, id int64) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (env *Maps) GetAll(ctx context.Context) ([]model.Map, error) {
|
||||
var maps []model.Map
|
||||
if err := env.db.Find(&maps).Error; err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return maps, nil
|
||||
}
|
||||
|
||||
func (env *Maps) List(ctx context.Context, filters datastore.OptionalMap, page model.Page) ([]model.Map, error) {
|
||||
var events []model.Map
|
||||
tx := env.db.Model(&model.Map{})
|
||||
if displayName, ok := filters.Pop("display_name"); ok {
|
||||
tx = tx.Where("display_name ILIKE ?", "%"+displayName.(string)+"%")
|
||||
}
|
||||
if err := tx.Where(filters.Map()).Offset(int((page.Number - 1) * page.Size)).Limit(int(page.Size)).Find(&events).Error; err != nil {
|
||||
if err := env.db.Where(filters.Map()).Offset(int((page.Number - 1) * page.Size)).Limit(int(page.Size)).Find(&events).Error; err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
|
||||
83
pkg/datastore/gormstore/submission_reviews.go
Normal file
83
pkg/datastore/gormstore/submission_reviews.go
Normal file
@@ -0,0 +1,83 @@
|
||||
package gormstore
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/model"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
// SubmissionReviews is the gorm-backed implementation of
// datastore.SubmissionReviews.
type SubmissionReviews struct {
	db *gorm.DB
}
|
||||
|
||||
// Get fetches a single submission review by primary key, translating gorm's
// record-not-found error into the datastore sentinel.
func (env *SubmissionReviews) Get(ctx context.Context, id int64) (model.SubmissionReview, error) {
	var review model.SubmissionReview
	if err := env.db.First(&review, id).Error; err != nil {
		if errors.Is(err, gorm.ErrRecordNotFound) {
			return review, datastore.ErrNotExist
		}
		return review, err
	}
	return review, nil
}
|
||||
|
||||
// GetBySubmissionAndReviewer fetches the review a given reviewer wrote for
// a given submission. If multiple rows match, gorm's First returns one;
// datastore.ErrNotExist is returned when none match.
func (env *SubmissionReviews) GetBySubmissionAndReviewer(ctx context.Context, submissionID int64, reviewerID uint64) (model.SubmissionReview, error) {
	var review model.SubmissionReview
	if err := env.db.Where("submission_id = ? AND reviewer_id = ?", submissionID, reviewerID).First(&review).Error; err != nil {
		if errors.Is(err, gorm.ErrRecordNotFound) {
			return review, datastore.ErrNotExist
		}
		return review, err
	}
	return review, nil
}
|
||||
|
||||
// Create inserts a new submission review row and returns it with any
// database-populated fields (e.g. the generated ID) filled in.
func (env *SubmissionReviews) Create(ctx context.Context, review model.SubmissionReview) (model.SubmissionReview, error) {
	if err := env.db.Create(&review).Error; err != nil {
		return review, err
	}

	return review, nil
}
|
||||
|
||||
func (env *SubmissionReviews) Update(ctx context.Context, id int64, values datastore.OptionalMap) error {
|
||||
if err := env.db.Model(&model.SubmissionReview{}).Where("id = ?", id).Updates(values.Map()).Error; err != nil {
|
||||
if err == gorm.ErrRecordNotFound {
|
||||
return datastore.ErrNotExist
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (env *SubmissionReviews) Delete(ctx context.Context, id int64) error {
|
||||
if err := env.db.Delete(&model.SubmissionReview{}, id).Error; err != nil {
|
||||
if err == gorm.ErrRecordNotFound {
|
||||
return datastore.ErrNotExist
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// ListBySubmission returns all reviews for one submission, newest first.
// An empty result is not an error.
func (env *SubmissionReviews) ListBySubmission(ctx context.Context, submissionID int64) ([]model.SubmissionReview, error) {
	var reviews []model.SubmissionReview
	if err := env.db.Where("submission_id = ?", submissionID).Order("created_at DESC").Find(&reviews).Error; err != nil {
		return nil, err
	}

	return reviews, nil
}
|
||||
|
||||
// MarkOutdatedBySubmission sets outdated = true on every review of the
// given submission. Affecting zero rows is not an error.
func (env *SubmissionReviews) MarkOutdatedBySubmission(ctx context.Context, submissionID int64) error {
	if err := env.db.Model(&model.SubmissionReview{}).Where("submission_id = ?", submissionID).Update("outdated", true).Error; err != nil {
		return err
	}

	return nil
}
|
||||
37
pkg/model/aor_event.go
Normal file
37
pkg/model/aor_event.go
Normal file
@@ -0,0 +1,37 @@
|
||||
package model
|
||||
|
||||
import "time"
|
||||
|
||||
// AOREventStatus is the lifecycle state of an AOR event cycle. Values are
// persisted as integers; do not reorder existing constants.
type AOREventStatus int32

const (
	AOREventStatusScheduled AOREventStatus = 0 // Event scheduled, waiting for start
	AOREventStatusOpen      AOREventStatus = 1 // Event started, accepting submissions (1st of month)
	AOREventStatusFrozen    AOREventStatus = 2 // Submissions frozen (after 1st of month)
	AOREventStatusSelected  AOREventStatus = 3 // Submissions selected for AOR (after week 1)
	AOREventStatusCompleted AOREventStatus = 4 // Decisions finalized (end of month)
	AOREventStatusClosed    AOREventStatus = 5 // Event closed/archived
)

// AOREvent represents an Accept or Reject event cycle.
// AOR events occur every 4 months (April, August, December).
type AOREvent struct {
	ID            int64     `gorm:"primaryKey"`
	StartDate     time.Time `gorm:"index"` // 1st day of AOR month
	FreezeDate    time.Time // End of 1st day (23:59:59)
	SelectionDate time.Time // End of week 1 (7 days after start)
	DecisionDate  time.Time // End of month (when final decisions are made)
	Status        AOREventStatus
	CreatedAt     time.Time // managed by gorm
	UpdatedAt     time.Time // managed by gorm
}

// AORSubmission represents a submission that was added to an AOR event.
// It is the join row between AOREvent and Submission.
type AORSubmission struct {
	ID           int64 `gorm:"primaryKey"`
	AOREventID   int64 `gorm:"index"` // owning AOREvent
	SubmissionID int64 `gorm:"index"` // linked Submission
	AddedAt      time.Time
	CreatedAt    time.Time // managed by gorm
	UpdatedAt    time.Time // managed by gorm
}
|
||||
@@ -91,7 +91,3 @@ type ReleaseMapfixRequest struct {
|
||||
ModelVersion uint64
|
||||
TargetAssetID uint64
|
||||
}
|
||||
|
||||
type SeedCombobulatorRequest struct {
|
||||
AssetID uint64
|
||||
}
|
||||
|
||||
14
pkg/model/submission_review.go
Normal file
14
pkg/model/submission_review.go
Normal file
@@ -0,0 +1,14 @@
|
||||
package model
|
||||
|
||||
import "time"
|
||||
|
||||
// SubmissionReview records one reviewer's verdict on one submission.
type SubmissionReview struct {
	ID           int64  `gorm:"primaryKey"`
	SubmissionID int64  `gorm:"index"` // reviewed Submission
	ReviewerID   uint64 // user ID of the reviewer
	Recommend    bool   // the reviewer's verdict flag
	Description  string // free-form review text
	Outdated     bool   // set via MarkOutdatedBySubmission when the submission changes after review
	CreatedAt    time.Time // managed by gorm
	UpdatedAt    time.Time // managed by gorm
}
|
||||
@@ -1,9 +1,8 @@
|
||||
package dto
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"git.itzana.me/strafesnet/go-grpc/maps_extended"
|
||||
"time"
|
||||
)
|
||||
|
||||
type MapFilter struct {
|
||||
|
||||
@@ -81,47 +81,6 @@ func (h *MapHandler) Get(ctx *gin.Context) {
|
||||
})
|
||||
}
|
||||
|
||||
// GetSnfmDownloadUrl parses the map ID from the path, asks the maps gRPC
// service for a signed SNFM download URL, and issues a 307 redirect to it.
// A gRPC NotFound is surfaced as HTTP 404; any other RPC failure as 500.
//
// @Summary Download SNFM file
// @Description Redirects to a signed download URL for a map's SNFM file
// @Tags maps
// @Security ApiKeyAuth
// @Param id path int true "Map ID"
// @Success 307 "Redirect to signed S3 URL"
// @Failure 404 {object} dto.Error "Map not found"
// @Failure default {object} dto.Error "General error response"
// @Router /map/{id}/snfm [get]
func (h *MapHandler) GetSnfmDownloadUrl(ctx *gin.Context) {
	id := ctx.Param("id")
	mapID, err := strconv.ParseInt(id, 10, 64)
	if err != nil {
		ctx.JSON(http.StatusBadRequest, dto.Error{
			Error: "Invalid map ID format",
		})
		return
	}

	resp, err := maps_extended.NewMapsServiceClient(h.mapsClient).GetSnfmDownloadUrl(ctx, &maps_extended.MapId{
		ID: mapID,
	})
	if err != nil {
		// Map gRPC status codes onto HTTP: NotFound -> 404, else 500.
		statusCode := http.StatusInternalServerError
		errorMessage := "Failed to get download URL"

		if status.Code(err) == codes.NotFound {
			statusCode = http.StatusNotFound
			errorMessage = "Map not found"
		}

		ctx.JSON(statusCode, dto.Error{
			Error: errorMessage,
		})
		log.WithError(err).Error("Failed to get SNFM download URL")
		return
	}

	// 307 preserves the request method on the follow-up to the signed URL.
	ctx.Redirect(http.StatusTemporaryRedirect, resp.Url)
}
|
||||
|
||||
// @Summary List maps
|
||||
// @Description Get a list of maps
|
||||
// @Tags maps
|
||||
|
||||
@@ -93,13 +93,6 @@ func setupRoutes(cfg *RouterConfig) (*gin.Engine, error) {
|
||||
v1.GET("/map/:id", mapsHandler.Get)
|
||||
}
|
||||
|
||||
v1Download := public_api.Group("/v1")
|
||||
{
|
||||
v1Download.Use(middleware.ValidateRequest("Maps", "Download", cfg.devClient))
|
||||
|
||||
v1Download.GET("/map/:id/snfm", mapsHandler.GetSnfmDownloadUrl)
|
||||
}
|
||||
|
||||
// Docs
|
||||
public_api.GET("/docs/*any", ginSwagger.WrapHandler(swaggerfiles.Handler))
|
||||
public_api.GET("/", func(ctx *gin.Context) {
|
||||
|
||||
30
pkg/service/aor_events.go
Normal file
30
pkg/service/aor_events.go
Normal file
@@ -0,0 +1,30 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/model"
|
||||
)
|
||||
|
||||
// AOR Event service methods: thin wrappers exposing the AOR datastore
// repositories through the Service type.

// GetAOREvent returns the AOR event with the given id.
func (svc *Service) GetAOREvent(ctx context.Context, id int64) (model.AOREvent, error) {
	return svc.db.AOREvents().Get(ctx, id)
}

// GetActiveAOREvent returns the currently active AOR event.
func (svc *Service) GetActiveAOREvent(ctx context.Context) (model.AOREvent, error) {
	return svc.db.AOREvents().GetActive(ctx)
}

// ListAOREvents returns one page of AOR events with no filter applied.
func (svc *Service) ListAOREvents(ctx context.Context, page model.Page) ([]model.AOREvent, error) {
	return svc.db.AOREvents().List(ctx, datastore.Optional(), page)
}

// GetAORSubmissionsByEvent returns the full Submission records that were
// selected into the given AOR event.
func (svc *Service) GetAORSubmissionsByEvent(ctx context.Context, eventID int64) ([]model.Submission, error) {
	return svc.db.AORSubmissions().ListWithSubmissions(ctx, eventID)
}

// GetAORSubmissionsBySubmission returns the AORSubmission join records for
// the given submission across all events.
func (svc *Service) GetAORSubmissionsBySubmission(ctx context.Context, submissionID int64) ([]model.AORSubmission, error) {
	return svc.db.AORSubmissions().GetBySubmission(ctx, submissionID)
}
|
||||
389
pkg/service/aor_scheduler.go
Normal file
389
pkg/service/aor_scheduler.go
Normal file
@@ -0,0 +1,389 @@
|
||||
package service
|
||||
|
||||
import (
	"context"
	"errors"
	"time"

	"git.itzana.me/strafesnet/maps-service/pkg/datastore"
	"git.itzana.me/strafesnet/maps-service/pkg/model"
	log "github.com/sirupsen/logrus"
)
|
||||
|
||||
// AORScheduler manages AOR events and their lifecycle.
// It is intended to be driven periodically via ProcessAOREvents (cron).
type AORScheduler struct {
	ds datastore.Datastore
	// NOTE(review): storing a context.Context in a struct is discouraged
	// in Go; consider threading ctx through ProcessAOREvents instead.
	ctx context.Context
}

// NewAORScheduler creates a new AOR scheduler backed by the given datastore.
func NewAORScheduler(ds datastore.Datastore) *AORScheduler {
	return &AORScheduler{
		ds:  ds,
		ctx: context.Background(),
	}
}
|
||||
|
||||
// ProcessAOREvents is the main entry point for the cron job
|
||||
// It checks and updates AOR event statuses
|
||||
func (s *AORScheduler) ProcessAOREvents() error {
|
||||
log.Info("AOR Scheduler: Processing events")
|
||||
|
||||
// Initialize: create next AOR event if none exists
|
||||
if err := s.ensureNextAOREvent(); err != nil {
|
||||
log.WithError(err).Error("Failed to ensure next AOR event")
|
||||
return err
|
||||
}
|
||||
|
||||
// Process current active event
|
||||
if err := s.processAOREvents(); err != nil {
|
||||
log.WithError(err).Error("Failed to process AOR events")
|
||||
return err
|
||||
}
|
||||
|
||||
log.Info("AOR Scheduler: Processing completed successfully")
|
||||
return nil
|
||||
}
|
||||
|
||||
// ensureNextAOREvent creates the next AOR event if one doesn't exist
|
||||
func (s *AORScheduler) ensureNextAOREvent() error {
|
||||
// Check if there's an active or scheduled event
|
||||
_, err := s.ds.AOREvents().GetActive(s.ctx)
|
||||
if err == nil {
|
||||
// Event exists, nothing to do
|
||||
return nil
|
||||
}
|
||||
if err != datastore.ErrNotExist {
|
||||
return err
|
||||
}
|
||||
|
||||
// No active event, create the next one
|
||||
nextDate := s.calculateNextAORDate(time.Now())
|
||||
return s.createAOREvent(nextDate)
|
||||
}
|
||||
|
||||
// calculateNextAORDate calculates the next AOR start date
|
||||
// AOR events are held every 4 months: April, August, December
|
||||
func (s *AORScheduler) calculateNextAORDate(from time.Time) time.Time {
|
||||
aorMonths := []time.Month{time.April, time.August, time.December}
|
||||
|
||||
currentYear := from.Year()
|
||||
currentMonth := from.Month()
|
||||
|
||||
// Find the next AOR month
|
||||
for _, month := range aorMonths {
|
||||
if month > currentMonth {
|
||||
// Next AOR is this year
|
||||
return time.Date(currentYear, month, 1, 0, 0, 0, 0, time.UTC)
|
||||
}
|
||||
}
|
||||
|
||||
// Next AOR is in April of next year
|
||||
return time.Date(currentYear+1, time.April, 1, 0, 0, 0, 0, time.UTC)
|
||||
}
|
||||
|
||||
// createAOREvent creates a new AOR event starting at startDate with the
// derived lifecycle dates:
//   - FreezeDate: end of the first day (startDate + 24h - 1s, i.e. 23:59:59)
//   - SelectionDate: 7 days after start
//   - DecisionDate: last second of startDate's month
// The event is persisted in Scheduled status.
func (s *AORScheduler) createAOREvent(startDate time.Time) error {
	freezeDate := startDate.Add(24*time.Hour - time.Second) // End of first day (23:59:59)
	selectionDate := startDate.Add(7 * 24 * time.Hour)      // 7 days after start

	// Decision date is the last day of the month at 23:59:59.
	// time.Date normalizes month+1 past December, so this is safe year-round.
	year, month, _ := startDate.Date()
	firstOfNextMonth := time.Date(year, month+1, 1, 0, 0, 0, 0, time.UTC)
	decisionDate := firstOfNextMonth.Add(-time.Second)

	event := model.AOREvent{
		StartDate:     startDate,
		FreezeDate:    freezeDate,
		SelectionDate: selectionDate,
		DecisionDate:  decisionDate,
		Status:        model.AOREventStatusScheduled,
	}

	_, err := s.ds.AOREvents().Create(s.ctx, event)
	if err != nil {
		return err
	}

	log.WithFields(log.Fields{
		"start_date":     startDate,
		"freeze_date":    freezeDate,
		"selection_date": selectionDate,
		"decision_date":  decisionDate,
	}).Info("Created new AOR event")

	return nil
}
|
||||
|
||||
// processAOREvents checks and updates AOR event statuses
|
||||
func (s *AORScheduler) processAOREvents() error {
|
||||
now := time.Now()
|
||||
|
||||
// Get active event
|
||||
event, err := s.ds.AOREvents().GetActive(s.ctx)
|
||||
if err == datastore.ErrNotExist {
|
||||
// No active event, ensure one is created
|
||||
return s.ensureNextAOREvent()
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Process event based on current status and dates
|
||||
switch event.Status {
|
||||
case model.AOREventStatusScheduled:
|
||||
// Check if event should start (it's now the 1st of the AOR month)
|
||||
if now.After(event.StartDate) || now.Equal(event.StartDate) {
|
||||
if err := s.openAOREvent(event.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
case model.AOREventStatusOpen:
|
||||
// Check if submissions should be frozen (past the freeze date)
|
||||
if now.After(event.FreezeDate) {
|
||||
if err := s.freezeAOREvent(event.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
case model.AOREventStatusFrozen:
|
||||
// Check if it's time to select submissions (past selection date)
|
||||
if now.After(event.SelectionDate) {
|
||||
if err := s.selectSubmissions(event.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
case model.AOREventStatusSelected:
|
||||
// Check if it's time to finalize decisions (past decision date)
|
||||
if now.After(event.DecisionDate) {
|
||||
if err := s.finalizeDecisions(event.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
case model.AOREventStatusCompleted:
|
||||
// Event completed, create next one and close this one
|
||||
nextDate := s.calculateNextAORDate(event.StartDate)
|
||||
if err := s.createAOREvent(nextDate); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := s.closeAOREvent(event.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// openAOREvent transitions an event to Open status, after which
// submissions are accepted into the event.
func (s *AORScheduler) openAOREvent(eventID int64) error {
	err := s.ds.AOREvents().Update(s.ctx, eventID, datastore.Optional().Add("status", model.AOREventStatusOpen))
	if err != nil {
		return err
	}

	log.WithField("event_id", eventID).Info("AOR event opened - submissions now accepted")
	return nil
}
|
||||
|
||||
// freezeAOREvent transitions an event to Frozen status. Only the status
// column is updated here; the actual locking of submissions is not yet
// implemented.
// TODO: lock submission from updates
func (s *AORScheduler) freezeAOREvent(eventID int64) error {
	err := s.ds.AOREvents().Update(s.ctx, eventID, datastore.Optional().Add("status", model.AOREventStatusFrozen))
	if err != nil {
		return err
	}

	log.WithField("event_id", eventID).Info("AOR event frozen - submissions locked")
	return nil
}
|
||||
|
||||
// selectSubmissions adds qualifying submissions (per shouldAddToAOR) to
// the AOR event, then moves the event to Selected status. Per-submission
// failures are logged and skipped so one bad row cannot abort the pass.
func (s *AORScheduler) selectSubmissions(eventID int64) error {
	// Get all submissions in Submitted status.
	// NOTE(review): Page{Number: 0, Size: 0} presumably means "no
	// pagination" — confirm against the datastore List implementation.
	submissions, err := s.ds.Submissions().List(s.ctx, datastore.Optional().Add("status_id", model.SubmissionStatusSubmitted), model.Page{Number: 0, Size: 0}, datastore.ListSortDisabled)
	if err != nil {
		return err
	}

	selectedCount := 0
	for _, submission := range submissions {
		// Get all reviews for this submission.
		reviews, err := s.ds.SubmissionReviews().ListBySubmission(s.ctx, submission.ID)
		if err != nil {
			log.WithError(err).WithField("submission_id", submission.ID).Error("Failed to get reviews")
			continue
		}

		// Apply selection criteria.
		if s.shouldAddToAOR(reviews) {
			// Record the selection as an AORSubmission join row.
			aorSubmission := model.AORSubmission{
				AOREventID:   eventID,
				SubmissionID: submission.ID,
				AddedAt:      time.Now(),
			}
			_, err := s.ds.AORSubmissions().Create(s.ctx, aorSubmission)
			if err != nil {
				log.WithError(err).WithField("submission_id", submission.ID).Error("Failed to add submission to AOR")
				continue
			}
			selectedCount++
			log.WithField("submission_id", submission.ID).Info("Added submission to AOR event")
		}
	}

	// Mark event as selected (waiting for end of month to finalize).
	err = s.ds.AOREvents().Update(s.ctx, eventID, datastore.Optional().Add("status", model.AOREventStatusSelected))
	if err != nil {
		return err
	}

	log.WithFields(log.Fields{
		"event_id":       eventID,
		"selected_count": selectedCount,
	}).Info("AOR submission selection completed - waiting for end of month to finalize decisions")

	return nil
}
|
||||
|
||||
// shouldAddToAOR determines if a submission should be added to the AOR event
|
||||
// Criteria:
|
||||
// - If there are 0 reviews: NOT added
|
||||
// - If there is 1+ review with recommend=true and not outdated: added
|
||||
// - If majority (>=50%) of non-outdated reviews recommend: added
|
||||
// TODO: Audit events
|
||||
func (s *AORScheduler) shouldAddToAOR(reviews []model.SubmissionReview) bool {
|
||||
// Filter out outdated reviews
|
||||
var validReviews []model.SubmissionReview
|
||||
for _, review := range reviews {
|
||||
if !review.Outdated {
|
||||
validReviews = append(validReviews, review)
|
||||
}
|
||||
}
|
||||
|
||||
// If there are 0 valid reviews, don't add
|
||||
if len(validReviews) == 0 {
|
||||
return false
|
||||
}
|
||||
|
||||
// Count recommendations
|
||||
recommendCount := 0
|
||||
for _, review := range validReviews {
|
||||
if review.Recommend {
|
||||
recommendCount++
|
||||
}
|
||||
}
|
||||
|
||||
// Need at least 50% recommendations (2 accept + 2 deny = 50% = added)
|
||||
// This means recommendCount * 2 >= len(validReviews)
|
||||
return recommendCount*2 >= len(validReviews)
|
||||
}
|
||||
|
||||
// shouldAccept determines if a submission should be accepted in final decisions
|
||||
// Criteria: Must have >50% (strictly greater than) recommendations
|
||||
func (s *AORScheduler) shouldAccept(reviews []model.SubmissionReview) bool {
|
||||
// Filter out outdated reviews
|
||||
var validReviews []model.SubmissionReview
|
||||
for _, review := range reviews {
|
||||
if !review.Outdated {
|
||||
validReviews = append(validReviews, review)
|
||||
}
|
||||
}
|
||||
|
||||
// If there are 0 valid reviews, don't accept
|
||||
if len(validReviews) == 0 {
|
||||
return false
|
||||
}
|
||||
|
||||
// Count recommendations
|
||||
recommendCount := 0
|
||||
for _, review := range validReviews {
|
||||
if review.Recommend {
|
||||
recommendCount++
|
||||
}
|
||||
}
|
||||
|
||||
// Need MORE than 50% recommendations (strictly greater)
|
||||
// This means recommendCount * 2 > len(validReviews)
|
||||
return recommendCount*2 > len(validReviews)
|
||||
}
|
||||
|
||||
// finalizeDecisions makes final accept/reject decisions at end of month.
// Submissions in the AOR event with >50% (strictly greater) non-outdated
// recommends are accepted; the rest are rejected. Per-submission failures
// are logged and skipped. Finally the event is marked Completed.
// TODO: Implement acceptance logic
// TODO: Query roblox group to get min votes needed for acceptance
// TODO: Audit events
func (s *AORScheduler) finalizeDecisions(eventID int64) error {
	// Get all submissions that were selected for this AOR event.
	aorSubmissions, err := s.ds.AORSubmissions().GetByAOREvent(s.ctx, eventID)
	if err != nil {
		return err
	}

	acceptedCount := 0
	rejectedCount := 0

	// Process each submission in the AOR event.
	for _, aorSub := range aorSubmissions {
		// Get the submission.
		submission, err := s.ds.Submissions().Get(s.ctx, aorSub.SubmissionID)
		if err != nil {
			log.WithError(err).WithField("submission_id", aorSub.SubmissionID).Error("Failed to get submission")
			continue
		}

		// Get all reviews for this submission.
		reviews, err := s.ds.SubmissionReviews().ListBySubmission(s.ctx, aorSub.SubmissionID)
		if err != nil {
			log.WithError(err).WithField("submission_id", aorSub.SubmissionID).Error("Failed to get reviews")
			continue
		}

		// Check if submission has >50% recommends (strictly greater).
		if s.shouldAccept(reviews) {
			// This submission has >50% recommends - accept it.
			// TODO: Implement acceptance logic
			// For now, this is a placeholder.
			log.WithField("submission_id", submission.ID).Info("TODO: Accept submission (placeholder)")
			acceptedCount++
		} else {
			// This submission does not have >50% recommends - reject it.
			err := s.ds.Submissions().Update(s.ctx, submission.ID, datastore.Optional().Add("status_id", model.SubmissionStatusRejected))
			if err != nil {
				log.WithError(err).WithField("submission_id", submission.ID).Error("Failed to reject submission")
				continue
			}
			log.WithField("submission_id", submission.ID).Info("Rejected submission")
			rejectedCount++
		}
	}

	// Mark event as completed.
	err = s.ds.AOREvents().Update(s.ctx, eventID, datastore.Optional().Add("status", model.AOREventStatusCompleted))
	if err != nil {
		return err
	}

	log.WithFields(log.Fields{
		"event_id":       eventID,
		"accepted_count": acceptedCount,
		"rejected_count": rejectedCount,
	}).Info("AOR decisions finalized")

	return nil
}
|
||||
|
||||
// closeAOREvent transitions an event to Closed status, the terminal state
// after the next cycle's event has been scheduled.
func (s *AORScheduler) closeAOREvent(eventID int64) error {
	err := s.ds.AOREvents().Update(s.ctx, eventID, datastore.Optional().Add("status", model.AOREventStatusClosed))
	if err != nil {
		return err
	}

	log.WithField("event_id", eventID).Info("AOR event closed")
	return nil
}
|
||||
@@ -103,10 +103,6 @@ func (svc *Service) GetMapfix(ctx context.Context, id int64) (model.Mapfix, erro
|
||||
return svc.db.Mapfixes().Get(ctx, id)
|
||||
}
|
||||
|
||||
func (svc *Service) GetMapfixList(ctx context.Context, ids []int64) ([]model.Mapfix, error) {
|
||||
return svc.db.Mapfixes().GetList(ctx, ids)
|
||||
}
|
||||
|
||||
func (svc *Service) UpdateMapfix(ctx context.Context, id int64, pmap MapfixUpdate) error {
|
||||
return svc.db.Mapfixes().Update(ctx, id, datastore.OptionalMap(pmap))
|
||||
}
|
||||
|
||||
@@ -99,10 +99,6 @@ func (svc *Service) CreateMap(ctx context.Context, item model.Map) (int64, error
|
||||
return map_item.ID, nil
|
||||
}
|
||||
|
||||
func (svc *Service) GetAllMaps(ctx context.Context) ([]model.Map, error) {
|
||||
return svc.db.Maps().GetAll(ctx)
|
||||
}
|
||||
|
||||
func (svc *Service) ListMaps(ctx context.Context, filter MapFilter, page model.Page) ([]model.Map, error) {
|
||||
return svc.db.Maps().List(ctx, datastore.OptionalMap(filter), page)
|
||||
}
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/model"
|
||||
)
|
||||
|
||||
func (svc *Service) NatsSeedCombobulator(assetID uint64) error {
|
||||
request := model.SeedCombobulatorRequest{
|
||||
AssetID: assetID,
|
||||
}
|
||||
|
||||
j, err := json.Marshal(request)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = svc.nats.Publish("maptest.combobulator.seed", j)
|
||||
return err
|
||||
}
|
||||
@@ -2,14 +2,11 @@ package service
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"git.itzana.me/strafesnet/go-grpc/maps"
|
||||
"git.itzana.me/strafesnet/go-grpc/users"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/roblox"
|
||||
"github.com/aws/aws-sdk-go-v2/service/s3"
|
||||
"github.com/nats-io/nats.go"
|
||||
"github.com/redis/go-redis/v9"
|
||||
)
|
||||
@@ -20,8 +17,6 @@ type Service struct {
|
||||
maps maps.MapsServiceClient
|
||||
users users.UsersServiceClient
|
||||
thumbnailService *ThumbnailService
|
||||
s3Presign *s3.PresignClient
|
||||
s3Bucket string
|
||||
}
|
||||
|
||||
func NewService(
|
||||
@@ -31,8 +26,6 @@ func NewService(
|
||||
users users.UsersServiceClient,
|
||||
robloxClient *roblox.Client,
|
||||
redisClient *redis.Client,
|
||||
s3Client *s3.Client,
|
||||
s3Bucket string,
|
||||
) Service {
|
||||
return Service{
|
||||
db: db,
|
||||
@@ -40,23 +33,9 @@ func NewService(
|
||||
maps: maps,
|
||||
users: users,
|
||||
thumbnailService: NewThumbnailService(robloxClient, redisClient),
|
||||
s3Presign: s3.NewPresignClient(s3Client),
|
||||
s3Bucket: s3Bucket,
|
||||
}
|
||||
}
|
||||
|
||||
// GetSnfmDownloadUrl returns a presigned S3 GET URL, valid for 5 minutes,
// for the map's SNFM file stored at maps/<mapID>.snfm in the configured bucket.
func (s *Service) GetSnfmDownloadUrl(ctx context.Context, mapID int64) (string, error) {
	key := fmt.Sprintf("maps/%d.snfm", mapID)
	presigned, err := s.s3Presign.PresignGetObject(ctx, &s3.GetObjectInput{
		Bucket: &s.s3Bucket,
		Key:    &key,
	}, s3.WithPresignExpires(5*time.Minute))
	if err != nil {
		return "", err
	}
	return presigned.URL, nil
}
|
||||
|
||||
// GetAssetThumbnails proxies to the thumbnail service
|
||||
func (s *Service) GetAssetThumbnails(ctx context.Context, assetIDs []uint64, size roblox.ThumbnailSize) (map[uint64]string, error) {
|
||||
return s.thumbnailService.GetAssetThumbnails(ctx, assetIDs, size)
|
||||
|
||||
55
pkg/service/submission_reviews.go
Normal file
55
pkg/service/submission_reviews.go
Normal file
@@ -0,0 +1,55 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/model"
|
||||
)
|
||||
|
||||
// SubmissionReviewUpdate is a partial-update builder for submission
// reviews, backed by a datastore.OptionalMap of column name -> new value.
type SubmissionReviewUpdate datastore.OptionalMap

// NewSubmissionReviewUpdate returns an empty update with no staged fields.
func NewSubmissionReviewUpdate() SubmissionReviewUpdate {
	update := datastore.Optional()
	return SubmissionReviewUpdate(update)
}

// SetRecommend stages a new value for the "recommend" column.
// NOTE(review): this relies on OptionalMap.Add mutating shared state
// through the value-receiver conversion — confirm against the datastore
// package that the staged value is not dropped.
func (update SubmissionReviewUpdate) SetRecommend(recommend bool) {
	datastore.OptionalMap(update).Add("recommend", recommend)
}

// SetDescription stages a new value for the "description" column.
func (update SubmissionReviewUpdate) SetDescription(description string) {
	datastore.OptionalMap(update).Add("description", description)
}

// SetOutdated stages a new value for the "outdated" column.
func (update SubmissionReviewUpdate) SetOutdated(outdated bool) {
	datastore.OptionalMap(update).Add("outdated", outdated)
}
|
||||
|
||||
// CreateSubmissionReview persists a new review and returns the stored row.
func (svc *Service) CreateSubmissionReview(ctx context.Context, review model.SubmissionReview) (model.SubmissionReview, error) {
	return svc.db.SubmissionReviews().Create(ctx, review)
}

// GetSubmissionReview returns the review with the given id.
func (svc *Service) GetSubmissionReview(ctx context.Context, id int64) (model.SubmissionReview, error) {
	return svc.db.SubmissionReviews().Get(ctx, id)
}

// GetSubmissionReviewBySubmissionAndReviewer returns the review a given
// reviewer left on a given submission.
func (svc *Service) GetSubmissionReviewBySubmissionAndReviewer(ctx context.Context, submissionID int64, reviewerID uint64) (model.SubmissionReview, error) {
	return svc.db.SubmissionReviews().GetBySubmissionAndReviewer(ctx, submissionID, reviewerID)
}

// UpdateSubmissionReview applies the staged fields in update to the review.
func (svc *Service) UpdateSubmissionReview(ctx context.Context, id int64, update SubmissionReviewUpdate) error {
	return svc.db.SubmissionReviews().Update(ctx, id, datastore.OptionalMap(update))
}

// DeleteSubmissionReview removes the review with the given id.
func (svc *Service) DeleteSubmissionReview(ctx context.Context, id int64) error {
	return svc.db.SubmissionReviews().Delete(ctx, id)
}

// ListSubmissionReviewsBySubmission returns all reviews on a submission.
func (svc *Service) ListSubmissionReviewsBySubmission(ctx context.Context, submissionID int64) ([]model.SubmissionReview, error) {
	return svc.db.SubmissionReviews().ListBySubmission(ctx, submissionID)
}

// MarkSubmissionReviewsOutdated flags every review on the submission as
// outdated — presumably invoked when the submission's asset changes.
func (svc *Service) MarkSubmissionReviewsOutdated(ctx context.Context, submissionID int64) error {
	return svc.db.SubmissionReviews().MarkOutdatedBySubmission(ctx, submissionID)
}
|
||||
121
pkg/web_api/aor_events.go
Normal file
121
pkg/web_api/aor_events.go
Normal file
@@ -0,0 +1,121 @@
|
||||
package web_api
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/api"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/model"
|
||||
)
|
||||
|
||||
// ListAOREvents implements listAOREvents operation.
//
// Get list of AOR events. All timestamps are exposed as Unix seconds.
//
// GET /aor-events
func (svc *Service) ListAOREvents(ctx context.Context, params api.ListAOREventsParams) ([]api.AOREvent, error) {
	page := model.Page{
		Number: params.Page,
		Size:   params.Limit,
	}

	events, err := svc.inner.ListAOREvents(ctx, page)
	if err != nil {
		return nil, err
	}

	// NOTE(review): resp stays nil when no events match, which some JSON
	// encoders render as null rather than [] — confirm that is intended.
	var resp []api.AOREvent
	for _, event := range events {
		resp = append(resp, api.AOREvent{
			ID:            event.ID,
			StartDate:     event.StartDate.Unix(),
			FreezeDate:    event.FreezeDate.Unix(),
			SelectionDate: event.SelectionDate.Unix(),
			DecisionDate:  event.DecisionDate.Unix(),
			Status:        int32(event.Status),
			CreatedAt:     event.CreatedAt.Unix(),
			UpdatedAt:     event.UpdatedAt.Unix(),
		})
	}

	return resp, nil
}
|
||||
|
||||
// GetActiveAOREvent implements getActiveAOREvent operation.
//
// Get the currently active AOR event, converted to the API model with
// timestamps as Unix seconds. Errors from the inner service (including
// not-found) are passed through for NewError to map to a status code.
//
// GET /aor-events/active
func (svc *Service) GetActiveAOREvent(ctx context.Context) (*api.AOREvent, error) {
	event, err := svc.inner.GetActiveAOREvent(ctx)
	if err != nil {
		return nil, err
	}

	return &api.AOREvent{
		ID:            event.ID,
		StartDate:     event.StartDate.Unix(),
		FreezeDate:    event.FreezeDate.Unix(),
		SelectionDate: event.SelectionDate.Unix(),
		DecisionDate:  event.DecisionDate.Unix(),
		Status:        int32(event.Status),
		CreatedAt:     event.CreatedAt.Unix(),
		UpdatedAt:     event.UpdatedAt.Unix(),
	}, nil
}
|
||||
|
||||
// GetAOREvent implements getAOREvent operation.
//
// Get a specific AOR event by id, converted to the API model with
// timestamps as Unix seconds.
//
// GET /aor-events/{AOREventID}
func (svc *Service) GetAOREvent(ctx context.Context, params api.GetAOREventParams) (*api.AOREvent, error) {
	event, err := svc.inner.GetAOREvent(ctx, params.AOREventID)
	if err != nil {
		return nil, err
	}

	return &api.AOREvent{
		ID:            event.ID,
		StartDate:     event.StartDate.Unix(),
		FreezeDate:    event.FreezeDate.Unix(),
		SelectionDate: event.SelectionDate.Unix(),
		DecisionDate:  event.DecisionDate.Unix(),
		Status:        int32(event.Status),
		CreatedAt:     event.CreatedAt.Unix(),
		UpdatedAt:     event.UpdatedAt.Unix(),
	}, nil
}
|
||||
|
||||
// GetAOREventSubmissions implements getAOREventSubmissions operation.
//
// Get all submissions selected into a specific AOR event, converted to
// the API model (timestamps as Unix seconds, ids widened/narrowed to the
// API field types).
//
// GET /aor-events/{AOREventID}/submissions
func (svc *Service) GetAOREventSubmissions(ctx context.Context, params api.GetAOREventSubmissionsParams) ([]api.Submission, error) {
	submissions, err := svc.inner.GetAORSubmissionsByEvent(ctx, params.AOREventID)
	if err != nil {
		return nil, err
	}

	// NOTE(review): resp stays nil when the event has no submissions —
	// confirm null vs [] is acceptable to API consumers.
	var resp []api.Submission
	for _, submission := range submissions {
		resp = append(resp, api.Submission{
			ID:                    submission.ID,
			DisplayName:           submission.DisplayName,
			Creator:               submission.Creator,
			GameID:                int32(submission.GameID),
			CreatedAt:             submission.CreatedAt.Unix(),
			UpdatedAt:             submission.UpdatedAt.Unix(),
			Submitter:             int64(submission.Submitter),
			AssetID:               int64(submission.AssetID),
			AssetVersion:          int64(submission.AssetVersion),
			ValidatedAssetID:      api.NewOptInt64(int64(submission.ValidatedAssetID)),
			ValidatedAssetVersion: api.NewOptInt64(int64(submission.ValidatedAssetVersion)),
			Completed:             submission.Completed,
			UploadedAssetID:       api.NewOptInt64(int64(submission.UploadedAssetID)),
			StatusID:              int32(submission.StatusID),
		})
	}

	return resp, nil
}
|
||||
@@ -2,6 +2,7 @@ package web_api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"time"
|
||||
@@ -34,10 +35,10 @@ var(
|
||||
)
|
||||
|
||||
var (
|
||||
ErrCreationPhaseMapfixesLimit = fmt.Errorf("%w: Active mapfixes limited to 20", ErrPermissionDenied)
|
||||
ErrActiveMapfixSameTargetAssetID = fmt.Errorf("%w: There is an active mapfix for this map already", ErrPermissionDenied)
|
||||
ErrCreationPhaseMapfixesLimit = errors.New("Active mapfixes limited to 20")
|
||||
ErrActiveMapfixSameTargetAssetID = errors.New("There is an active mapfix for this map already")
|
||||
ErrAcceptOwnMapfix = fmt.Errorf("%w: You cannot accept your own mapfix as the submitter", ErrPermissionDenied)
|
||||
ErrCreateMapfixRateLimit = fmt.Errorf("%w: You must not create more than 5 mapfixes every 10 minutes", ErrTooManyRequests)
|
||||
ErrCreateMapfixRateLimit = errors.New("You must not create more than 5 mapfixes every 10 minutes")
|
||||
)
|
||||
|
||||
// POST /mapfixes
|
||||
@@ -447,12 +448,7 @@ func (svc *Service) ActionMapfixRequestChanges(ctx context.Context, params api.A
|
||||
target_status := model.MapfixStatusChangesRequested
|
||||
update := service.NewMapfixUpdate()
|
||||
update.SetStatusID(target_status)
|
||||
allow_statuses := []model.MapfixStatus{
|
||||
model.MapfixStatusUploaded,
|
||||
model.MapfixStatusValidated,
|
||||
model.MapfixStatusAcceptedUnvalidated,
|
||||
model.MapfixStatusSubmitted,
|
||||
}
|
||||
allow_statuses := []model.MapfixStatus{model.MapfixStatusValidated, model.MapfixStatusAcceptedUnvalidated, model.MapfixStatusSubmitted}
|
||||
err = svc.inner.UpdateMapfixIfStatus(ctx, params.MapfixID, allow_statuses, update)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -558,11 +554,7 @@ func (svc *Service) ActionMapfixTriggerSubmit(ctx context.Context, params api.Ac
|
||||
target_status := model.MapfixStatusSubmitting
|
||||
update := service.NewMapfixUpdate()
|
||||
update.SetStatusID(target_status)
|
||||
allow_statuses := []model.MapfixStatus{
|
||||
model.MapfixStatusUnderConstruction,
|
||||
model.MapfixStatusChangesRequested,
|
||||
model.MapfixStatusSubmitted,
|
||||
}
|
||||
allow_statuses := []model.MapfixStatus{model.MapfixStatusUnderConstruction, model.MapfixStatusChangesRequested}
|
||||
err = svc.inner.UpdateMapfixIfStatus(ctx, params.MapfixID, allow_statuses, update)
|
||||
if err != nil {
|
||||
return err
|
||||
|
||||
@@ -86,61 +86,6 @@ func (svc *Service) GetMap(ctx context.Context, params api.GetMapParams) (*api.M
|
||||
}, nil
|
||||
}
|
||||
|
||||
// SeedCombobulator implements seedCombobulator operation.
|
||||
//
|
||||
// Queue all maps for combobulator processing.
|
||||
//
|
||||
// POST /maps-admin/seed-combobulator
|
||||
func (svc *Service) SeedCombobulator(ctx context.Context) error {
|
||||
userInfo, ok := ctx.Value("UserInfo").(UserInfoHandle)
|
||||
if !ok {
|
||||
return ErrUserInfo
|
||||
}
|
||||
|
||||
has_role, err := userInfo.HasRoleSubmissionRelease()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !has_role {
|
||||
return ErrPermissionDeniedNeedRoleSubmissionRelease
|
||||
}
|
||||
|
||||
maps, err := svc.inner.GetAllMaps(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, m := range maps {
|
||||
if err := svc.inner.NatsSeedCombobulator(uint64(m.ID)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Combobulate implements combobulate operation.
|
||||
//
|
||||
// Queue a map for combobulator processing.
|
||||
//
|
||||
// POST /maps-admin/combobulate
|
||||
func (svc *Service) Combobulate(ctx context.Context, params api.CombobulateParams) error {
|
||||
userInfo, ok := ctx.Value("UserInfo").(UserInfoHandle)
|
||||
if !ok {
|
||||
return ErrUserInfo
|
||||
}
|
||||
|
||||
has_role, err := userInfo.HasRoleSubmissionRelease()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !has_role {
|
||||
return ErrPermissionDeniedNeedRoleSubmissionRelease
|
||||
}
|
||||
|
||||
return svc.inner.NatsSeedCombobulator(uint64(params.MapID));
|
||||
}
|
||||
|
||||
// DownloadMapAsset invokes downloadMapAsset operation.
|
||||
//
|
||||
// Download the map asset.
|
||||
|
||||
@@ -2,7 +2,7 @@ package web_api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"errors"
|
||||
|
||||
"git.itzana.me/strafesnet/go-grpc/auth"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/api"
|
||||
@@ -11,9 +11,9 @@ import (
|
||||
|
||||
var (
|
||||
// ErrMissingSessionID there is no session id
|
||||
ErrMissingSessionID = fmt.Errorf("%w: SessionID missing", ErrUserInfo)
|
||||
ErrMissingSessionID = errors.New("SessionID missing")
|
||||
// ErrInvalidSession caller does not have a valid session
|
||||
ErrInvalidSession = fmt.Errorf("%w: Session invalid", ErrUserInfo)
|
||||
ErrInvalidSession = errors.New("Session invalid")
|
||||
)
|
||||
|
||||
type UserInfoHandle struct {
|
||||
|
||||
@@ -12,8 +12,6 @@ import (
|
||||
)
|
||||
|
||||
var (
|
||||
ErrBadRequest = errors.New("Bad request")
|
||||
ErrTooManyRequests = errors.New("Too many requests")
|
||||
// ErrPermissionDenied caller does not have the required role
|
||||
ErrPermissionDenied = errors.New("Permission denied")
|
||||
// ErrUserInfo user info is missing for some reason
|
||||
@@ -28,7 +26,7 @@ var (
|
||||
ErrPermissionDeniedNeedRoleMapDownload = fmt.Errorf("%w: Need Role MapDownload", ErrPermissionDenied)
|
||||
ErrPermissionDeniedNeedRoleScriptWrite = fmt.Errorf("%w: Need Role ScriptWrite", ErrPermissionDenied)
|
||||
ErrPermissionDeniedNeedRoleMaptest = fmt.Errorf("%w: Need Role Maptest", ErrPermissionDenied)
|
||||
ErrNegativeID = fmt.Errorf("%w: A negative ID was provided", ErrBadRequest)
|
||||
ErrNegativeID = errors.New("A negative ID was provided")
|
||||
)
|
||||
|
||||
type Service struct {
|
||||
@@ -51,20 +49,14 @@ func NewService(
|
||||
// Used for common default response.
|
||||
func (svc *Service) NewError(ctx context.Context, err error) *api.ErrorStatusCode {
|
||||
status := 500
|
||||
if errors.Is(err, ErrBadRequest) {
|
||||
status = 400
|
||||
}
|
||||
if errors.Is(err, ErrUserInfo) {
|
||||
status = 401
|
||||
if errors.Is(err, datastore.ErrNotExist) {
|
||||
status = 404
|
||||
}
|
||||
if errors.Is(err, ErrPermissionDenied) {
|
||||
status = 403
|
||||
}
|
||||
if errors.Is(err, datastore.ErrNotExist) {
|
||||
status = 404
|
||||
}
|
||||
if errors.Is(err, ErrTooManyRequests) {
|
||||
status = 429
|
||||
if errors.Is(err, ErrUserInfo) {
|
||||
status = 401
|
||||
}
|
||||
return &api.ErrorStatusCode{
|
||||
StatusCode: status,
|
||||
|
||||
207
pkg/web_api/submission_reviews.go
Normal file
207
pkg/web_api/submission_reviews.go
Normal file
@@ -0,0 +1,207 @@
|
||||
package web_api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/api"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/model"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/service"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrReviewNotOwner = errors.New("You can only edit your own review")
|
||||
ErrReviewNotSubmitted = errors.New("Reviews can only be created or edited when the submission is in Submitted status")
|
||||
)
|
||||
|
||||
// ListSubmissionReviews implements listSubmissionReviews operation.
|
||||
//
|
||||
// Get all reviews for a submission.
|
||||
//
|
||||
// GET /submissions/{SubmissionID}/reviews
|
||||
func (svc *Service) ListSubmissionReviews(ctx context.Context, params api.ListSubmissionReviewsParams) ([]api.SubmissionReview, error) {
|
||||
reviews, err := svc.inner.ListSubmissionReviewsBySubmission(ctx, params.SubmissionID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var resp []api.SubmissionReview
|
||||
for _, review := range reviews {
|
||||
resp = append(resp, api.SubmissionReview{
|
||||
ID: review.ID,
|
||||
SubmissionID: review.SubmissionID,
|
||||
ReviewerID: int64(review.ReviewerID),
|
||||
Recommend: review.Recommend,
|
||||
Description: review.Description,
|
||||
Outdated: review.Outdated,
|
||||
CreatedAt: review.CreatedAt.Unix(),
|
||||
UpdatedAt: review.UpdatedAt.Unix(),
|
||||
})
|
||||
}
|
||||
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
// CreateSubmissionReview implements createSubmissionReview operation.
|
||||
//
|
||||
// Create a review for a submission.
|
||||
//
|
||||
// POST /submissions/{SubmissionID}/reviews
|
||||
func (svc *Service) CreateSubmissionReview(ctx context.Context, req *api.SubmissionReviewCreate, params api.CreateSubmissionReviewParams) (*api.SubmissionReview, error) {
|
||||
userInfo, ok := ctx.Value("UserInfo").(UserInfoHandle)
|
||||
if !ok {
|
||||
return nil, ErrUserInfo
|
||||
}
|
||||
|
||||
// Check if caller has required role
|
||||
has_role, err := userInfo.HasRoleSubmissionReview()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !has_role {
|
||||
return nil, ErrPermissionDeniedNeedRoleSubmissionReview
|
||||
}
|
||||
|
||||
userId, err := userInfo.GetUserID()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Check if submission exists and is in Submitted status
|
||||
submission, err := svc.inner.GetSubmission(ctx, params.SubmissionID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if submission.StatusID != model.SubmissionStatusSubmitted {
|
||||
return nil, ErrReviewNotSubmitted
|
||||
}
|
||||
|
||||
// Check if user already has a review for this submission
|
||||
existingReview, err := svc.inner.GetSubmissionReviewBySubmissionAndReviewer(ctx, params.SubmissionID, userId)
|
||||
if err != nil && !errors.Is(err, datastore.ErrNotExist) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// If review exists, update it instead
|
||||
if err == nil {
|
||||
update := service.NewSubmissionReviewUpdate()
|
||||
update.SetRecommend(req.Recommend)
|
||||
update.SetDescription(req.Description)
|
||||
update.SetOutdated(false)
|
||||
|
||||
err = svc.inner.UpdateSubmissionReview(ctx, existingReview.ID, update)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Fetch updated review
|
||||
updatedReview, err := svc.inner.GetSubmissionReview(ctx, existingReview.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &api.SubmissionReview{
|
||||
ID: updatedReview.ID,
|
||||
SubmissionID: updatedReview.SubmissionID,
|
||||
ReviewerID: int64(updatedReview.ReviewerID),
|
||||
Recommend: updatedReview.Recommend,
|
||||
Description: updatedReview.Description,
|
||||
Outdated: updatedReview.Outdated,
|
||||
CreatedAt: updatedReview.CreatedAt.Unix(),
|
||||
UpdatedAt: updatedReview.UpdatedAt.Unix(),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Create new review
|
||||
review := model.SubmissionReview{
|
||||
SubmissionID: params.SubmissionID,
|
||||
ReviewerID: userId,
|
||||
Recommend: req.Recommend,
|
||||
Description: req.Description,
|
||||
Outdated: false,
|
||||
}
|
||||
|
||||
createdReview, err := svc.inner.CreateSubmissionReview(ctx, review)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &api.SubmissionReview{
|
||||
ID: createdReview.ID,
|
||||
SubmissionID: createdReview.SubmissionID,
|
||||
ReviewerID: int64(createdReview.ReviewerID),
|
||||
Recommend: createdReview.Recommend,
|
||||
Description: createdReview.Description,
|
||||
Outdated: createdReview.Outdated,
|
||||
CreatedAt: createdReview.CreatedAt.Unix(),
|
||||
UpdatedAt: createdReview.UpdatedAt.Unix(),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// UpdateSubmissionReview implements updateSubmissionReview operation.
|
||||
//
|
||||
// Update an existing review.
|
||||
//
|
||||
// PATCH /submissions/{SubmissionID}/reviews/{ReviewID}
|
||||
func (svc *Service) UpdateSubmissionReview(ctx context.Context, req *api.SubmissionReviewCreate, params api.UpdateSubmissionReviewParams) (*api.SubmissionReview, error) {
|
||||
userInfo, ok := ctx.Value("UserInfo").(UserInfoHandle)
|
||||
if !ok {
|
||||
return nil, ErrUserInfo
|
||||
}
|
||||
|
||||
userId, err := userInfo.GetUserID()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Get the existing review
|
||||
review, err := svc.inner.GetSubmissionReview(ctx, params.ReviewID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Check if user is the owner of the review
|
||||
if review.ReviewerID != userId {
|
||||
return nil, ErrReviewNotOwner
|
||||
}
|
||||
|
||||
// Check if submission is still in Submitted status
|
||||
submission, err := svc.inner.GetSubmission(ctx, params.SubmissionID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if submission.StatusID != model.SubmissionStatusSubmitted {
|
||||
return nil, ErrReviewNotSubmitted
|
||||
}
|
||||
|
||||
// Update the review
|
||||
update := service.NewSubmissionReviewUpdate()
|
||||
update.SetRecommend(req.Recommend)
|
||||
update.SetDescription(req.Description)
|
||||
update.SetOutdated(false) // Clear outdated flag on edit
|
||||
|
||||
err = svc.inner.UpdateSubmissionReview(ctx, params.ReviewID, update)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Fetch updated review
|
||||
updatedReview, err := svc.inner.GetSubmissionReview(ctx, params.ReviewID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &api.SubmissionReview{
|
||||
ID: updatedReview.ID,
|
||||
SubmissionID: updatedReview.SubmissionID,
|
||||
ReviewerID: int64(updatedReview.ReviewerID),
|
||||
Recommend: updatedReview.Recommend,
|
||||
Description: updatedReview.Description,
|
||||
Outdated: updatedReview.Outdated,
|
||||
CreatedAt: updatedReview.CreatedAt.Unix(),
|
||||
UpdatedAt: updatedReview.UpdatedAt.Unix(),
|
||||
}, nil
|
||||
}
|
||||
@@ -2,6 +2,7 @@ package web_api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"time"
|
||||
@@ -25,13 +26,12 @@ var(
|
||||
)
|
||||
|
||||
var (
|
||||
ErrCreationPhaseSubmissionsLimit = fmt.Errorf("%w: Active submissions limited to 20", ErrPermissionDenied)
|
||||
ErrUploadedAssetIDAlreadyExists = fmt.Errorf("%w: The submission UploadedAssetID is already set", ErrPermissionDenied)
|
||||
ErrReleaseInvalidStatus = fmt.Errorf("%w: Only submissions with Uploaded status can be released", ErrPermissionDenied)
|
||||
ErrReleaseNoUploadedAssetID = fmt.Errorf("%w: Only submissions with a UploadedAssetID can be released", ErrPermissionDenied)
|
||||
ErrCreationPhaseSubmissionsLimit = errors.New("Active submissions limited to 20")
|
||||
ErrUploadedAssetIDAlreadyExists = errors.New("The submission UploadedAssetID is already set")
|
||||
ErrReleaseInvalidStatus = errors.New("Only submissions with Uploaded status can be released")
|
||||
ErrReleaseNoUploadedAssetID = errors.New("Only submissions with a UploadedAssetID can be released")
|
||||
ErrAcceptOwnSubmission = fmt.Errorf("%w: You cannot accept your own submission as the submitter", ErrPermissionDenied)
|
||||
ErrCreateSubmissionRateLimit = fmt.Errorf("%w: You must not create more than 5 submissions every 10 minutes", ErrTooManyRequests)
|
||||
ErrDisplayNameNotUnique = fmt.Errorf("%w: Cannot submit: A map exists with the same DisplayName", ErrPermissionDenied)
|
||||
ErrCreateSubmissionRateLimit = errors.New("You must not create more than 5 submissions every 10 minutes")
|
||||
)
|
||||
|
||||
// POST /submissions
|
||||
@@ -437,12 +437,7 @@ func (svc *Service) ActionSubmissionRequestChanges(ctx context.Context, params a
|
||||
target_status := model.SubmissionStatusChangesRequested
|
||||
update := service.NewSubmissionUpdate()
|
||||
update.SetStatusID(target_status)
|
||||
allowed_statuses := []model.SubmissionStatus{
|
||||
model.SubmissionStatusUploaded,
|
||||
model.SubmissionStatusValidated,
|
||||
model.SubmissionStatusAcceptedUnvalidated,
|
||||
model.SubmissionStatusSubmitted,
|
||||
}
|
||||
allowed_statuses := []model.SubmissionStatus{model.SubmissionStatusValidated, model.SubmissionStatusAcceptedUnvalidated, model.SubmissionStatusSubmitted}
|
||||
err = svc.inner.UpdateSubmissionIfStatus(ctx, params.SubmissionID, allowed_statuses, update)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -552,33 +547,11 @@ func (svc *Service) ActionSubmissionTriggerSubmit(ctx context.Context, params ap
|
||||
}
|
||||
}
|
||||
|
||||
// check for maps with the exact same name
|
||||
filter := service.NewMapFilter()
|
||||
filter.SetDisplayName(submission.DisplayName)
|
||||
maps_list, err := svc.inner.ListMaps(
|
||||
ctx,
|
||||
filter,
|
||||
model.Page{
|
||||
Number: 1,
|
||||
Size: 1,
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if len(maps_list) != 0 {
|
||||
return ErrDisplayNameNotUnique
|
||||
}
|
||||
|
||||
// transaction
|
||||
target_status := model.SubmissionStatusSubmitting
|
||||
update := service.NewSubmissionUpdate()
|
||||
update.SetStatusID(target_status)
|
||||
allowed_statuses := []model.SubmissionStatus{
|
||||
model.SubmissionStatusUnderConstruction,
|
||||
model.SubmissionStatusChangesRequested,
|
||||
model.SubmissionStatusSubmitted,
|
||||
}
|
||||
allowed_statuses := []model.SubmissionStatus{model.SubmissionStatusUnderConstruction, model.SubmissionStatusChangesRequested}
|
||||
err = svc.inner.UpdateSubmissionIfStatus(ctx, params.SubmissionID, allowed_statuses, update)
|
||||
if err != nil {
|
||||
return err
|
||||
|
||||
@@ -17,7 +17,7 @@ reqwest = { version = "0", features = [
|
||||
# default features
|
||||
"charset", "http2", "system-proxy"
|
||||
], default-features = false }
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
serde_repr = "0.1.19"
|
||||
url = "2"
|
||||
|
||||
@@ -4,18 +4,18 @@ version = "0.1.1"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
async-nats.workspace = true
|
||||
async-nats = "0.45.0"
|
||||
futures = "0.3.31"
|
||||
rbx_asset.workspace = true
|
||||
rbx_binary.workspace = true
|
||||
rbx_dom_weak.workspace = true
|
||||
rbx_asset = { version = "0.5.0", features = ["gzip", "rustls-tls"], default-features = false, registry = "strafesnet" }
|
||||
rbx_binary = "2.0.0"
|
||||
rbx_dom_weak = "4.0.0"
|
||||
rbx_reflection_database = "2.0.1"
|
||||
rbx_xml = "2.0.0"
|
||||
regex = { version = "1.11.3", default-features = false }
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
serde = { version = "1.0.215", features = ["derive"] }
|
||||
serde_json = "1.0.133"
|
||||
siphasher = "1.0.1"
|
||||
tokio.workspace = true
|
||||
tokio = { version = "1.41.1", features = ["macros", "rt-multi-thread", "signal"] }
|
||||
heck = "0.5.0"
|
||||
rust-grpc = { version = "1.6.1", registry = "strafesnet" }
|
||||
tonic = "0.14.1"
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
FROM debian:trixie-slim AS runtime
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends libssl3t64 ca-certificates && rm -rf /var/lib/apt/lists/*
|
||||
COPY /target/release/maps-validation /
|
||||
FROM alpine:3.21 AS runtime
|
||||
COPY /target/x86_64-unknown-linux-musl/release/maps-validation /
|
||||
ENTRYPOINT ["/maps-validation"]
|
||||
|
||||
@@ -324,24 +324,25 @@ pub fn get_model_info<'a>(dom:&'a rbx_dom_weak::WeakDom,model_instance:&'a rbx_d
|
||||
}
|
||||
|
||||
// check if an observed string matches an expected string
|
||||
pub struct StringEquality<'a,Str>{
|
||||
pub struct StringCheck<'a,T,Str>(Result<T,StringCheckContext<'a,Str>>);
|
||||
pub struct StringCheckContext<'a,Str>{
|
||||
observed:&'a str,
|
||||
expected:Str,
|
||||
}
|
||||
impl<'a,Str> StringEquality<'a,Str>
|
||||
impl<'a,Str> StringCheckContext<'a,Str>
|
||||
where
|
||||
&'a str:PartialEq<Str>,
|
||||
{
|
||||
/// Compute the StringCheck, passing through the provided value on success.
|
||||
fn check<T>(self,value:T)->Result<T,Self>{
|
||||
fn check<T>(self,value:T)->StringCheck<'a,T,Str>{
|
||||
if self.observed==self.expected{
|
||||
Ok(value)
|
||||
StringCheck(Ok(value))
|
||||
}else{
|
||||
Err(self)
|
||||
StringCheck(Err(self))
|
||||
}
|
||||
}
|
||||
}
|
||||
impl<Str:std::fmt::Display> std::fmt::Display for StringEquality<'_,Str>{
|
||||
impl<Str:std::fmt::Display> std::fmt::Display for StringCheckContext<'_,Str>{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"expected: {}, observed: {}",self.expected,self.observed)
|
||||
}
|
||||
@@ -463,66 +464,19 @@ impl TryFrom<MapInfo<'_>> for MapInfoOwned{
|
||||
struct Exists;
|
||||
struct Absent;
|
||||
|
||||
enum DisplayNameError<'a>{
|
||||
TitleCase(StringEquality<'a,String>),
|
||||
Empty(StringEmpty),
|
||||
TooLong(usize),
|
||||
StringValue(StringValueError),
|
||||
}
|
||||
fn check_display_name<'a>(display_name:Result<&'a str,StringValueError>)->Result<&'a str,DisplayNameError<'a>>{
|
||||
// DisplayName StringValue can be missing or whatever
|
||||
let display_name=display_name.map_err(DisplayNameError::StringValue)?;
|
||||
|
||||
// DisplayName cannot be ""
|
||||
let display_name=check_empty(display_name).map_err(DisplayNameError::Empty)?;
|
||||
|
||||
// DisplayName cannot exceed 50 characters
|
||||
if 50<display_name.len(){
|
||||
return Err(DisplayNameError::TooLong(display_name.len()));
|
||||
}
|
||||
|
||||
// Check title case
|
||||
let display_name=StringEquality{
|
||||
observed:display_name,
|
||||
expected:display_name.to_title_case(),
|
||||
}.check(display_name).map_err(DisplayNameError::TitleCase)?;
|
||||
|
||||
Ok(display_name)
|
||||
}
|
||||
|
||||
enum CreatorError{
|
||||
Empty(StringEmpty),
|
||||
TooLong(usize),
|
||||
StringValue(StringValueError),
|
||||
}
|
||||
fn check_creator<'a>(creator:Result<&'a str,StringValueError>)->Result<&'a str,CreatorError>{
|
||||
// Creator StringValue can be missing or whatever
|
||||
let creator=creator.map_err(CreatorError::StringValue)?;
|
||||
|
||||
// Creator cannot be ""
|
||||
let creator=check_empty(creator).map_err(CreatorError::Empty)?;
|
||||
|
||||
// Creator cannot exceed 50 characters
|
||||
if 50<creator.len(){
|
||||
return Err(CreatorError::TooLong(creator.len()));
|
||||
}
|
||||
|
||||
Ok(creator)
|
||||
}
|
||||
|
||||
/// The result of every map check.
|
||||
struct MapCheck<'a>{
|
||||
// === METADATA CHECKS ===
|
||||
// The root must be of class Model
|
||||
model_class:Result<(),StringEquality<'a,&'static str>>,
|
||||
model_class:StringCheck<'a,(),&'static str>,
|
||||
// Model's name must be in snake case
|
||||
model_name:Result<(),StringEquality<'a,String>>,
|
||||
model_name:StringCheck<'a,(),String>,
|
||||
// Map must have a StringValue named DisplayName.
|
||||
// Value must not be empty, must be in title case.
|
||||
display_name:Result<&'a str,DisplayNameError<'a>>,
|
||||
display_name:Result<Result<StringCheck<'a,&'a str,String>,StringEmpty>,StringValueError>,
|
||||
// Map must have a StringValue named Creator.
|
||||
// Value must not be empty.
|
||||
creator:Result<&'a str,CreatorError>,
|
||||
creator:Result<Result<&'a str,StringEmpty>,StringValueError>,
|
||||
// The prefix of the model's name must match the game it was submitted for.
|
||||
// bhop_ for bhop, and surf_ for surf
|
||||
game_id:Result<GameID,ParseGameIDError>,
|
||||
@@ -557,22 +511,27 @@ struct MapCheck<'a>{
|
||||
impl<'a> ModelInfo<'a>{
|
||||
fn check(self)->MapCheck<'a>{
|
||||
// Check class is exactly "Model"
|
||||
let model_class=StringEquality{
|
||||
let model_class=StringCheckContext{
|
||||
observed:self.model_class,
|
||||
expected:"Model",
|
||||
}.check(());
|
||||
|
||||
// Check model name is snake case
|
||||
let model_name=StringEquality{
|
||||
let model_name=StringCheckContext{
|
||||
observed:self.model_name,
|
||||
expected:self.model_name.to_snake_case(),
|
||||
}.check(());
|
||||
|
||||
// Check display name is not empty and has title case
|
||||
let display_name=check_display_name(self.map_info.display_name);
|
||||
let display_name=self.map_info.display_name.map(|display_name|{
|
||||
check_empty(display_name).map(|display_name|StringCheckContext{
|
||||
observed:display_name,
|
||||
expected:display_name.to_title_case(),
|
||||
}.check(display_name))
|
||||
});
|
||||
|
||||
// Check Creator is not empty
|
||||
let creator=check_creator(self.map_info.creator);
|
||||
let creator=self.map_info.creator.map(check_empty);
|
||||
|
||||
// Check GameID (model name was prefixed with bhop_ surf_ etc)
|
||||
let game_id=self.map_info.game_id;
|
||||
@@ -671,10 +630,10 @@ impl MapCheck<'_>{
|
||||
fn result(self)->Result<MapInfoOwned,Result<MapCheckList,serde_json::Error>>{
|
||||
match self{
|
||||
MapCheck{
|
||||
model_class:Ok(()),
|
||||
model_name:Ok(()),
|
||||
display_name:Ok(display_name),
|
||||
creator:Ok(creator),
|
||||
model_class:StringCheck(Ok(())),
|
||||
model_name:StringCheck(Ok(())),
|
||||
display_name:Ok(Ok(StringCheck(Ok(display_name)))),
|
||||
creator:Ok(Ok(creator)),
|
||||
game_id:Ok(game_id),
|
||||
mapstart:Ok(Exists),
|
||||
mode_start_counts:DuplicateCheck(Ok(())),
|
||||
@@ -778,25 +737,27 @@ macro_rules! summary_format{
|
||||
impl MapCheck<'_>{
|
||||
fn itemize(&self)->Result<MapCheckList,serde_json::Error>{
|
||||
let model_class=match &self.model_class{
|
||||
Ok(())=>passed!("ModelClass"),
|
||||
Err(context)=>summary_format!("ModelClass","Invalid model class: {context}"),
|
||||
StringCheck(Ok(()))=>passed!("ModelClass"),
|
||||
StringCheck(Err(context))=>summary_format!("ModelClass","Invalid model class: {context}"),
|
||||
};
|
||||
let model_name=match &self.model_name{
|
||||
Ok(())=>passed!("ModelName"),
|
||||
Err(context)=>summary_format!("ModelName","Model name must have snake_case: {context}"),
|
||||
StringCheck(Ok(()))=>passed!("ModelName"),
|
||||
StringCheck(Err(context))=>summary_format!("ModelName","Model name must have snake_case: {context}"),
|
||||
};
|
||||
let display_name=match &self.display_name{
|
||||
Ok(_)=>passed!("DisplayName"),
|
||||
Err(DisplayNameError::TitleCase(context))=>summary_format!("DisplayName","DisplayName must have Title Case: {context}"),
|
||||
Err(DisplayNameError::Empty(context))=>summary_format!("DisplayName","Invalid DisplayName: {context}"),
|
||||
Err(DisplayNameError::TooLong(context))=>summary_format!("DisplayName","DisplayName is too long: {context} characters (50 characters max)"),
|
||||
Err(DisplayNameError::StringValue(context))=>summary_format!("DisplayName","DisplayName StringValue: {context}"),
|
||||
Ok(Ok(StringCheck(Ok(_))))=>passed!("DisplayName"),
|
||||
Ok(Ok(StringCheck(Err(context))))=>summary_format!("DisplayName","DisplayName must have Title Case: {context}"),
|
||||
Ok(Err(context))=>summary_format!("DisplayName","Invalid DisplayName: {context}"),
|
||||
Err(StringValueError::ObjectNotFound)=>summary!("DisplayName","Missing DisplayName StringValue".to_owned()),
|
||||
Err(StringValueError::ValueNotSet)=>summary!("DisplayName","DisplayName Value not set".to_owned()),
|
||||
Err(StringValueError::NonStringValue)=>summary!("DisplayName","DisplayName Value is not a String".to_owned()),
|
||||
};
|
||||
let creator=match &self.creator{
|
||||
Ok(_)=>passed!("Creator"),
|
||||
Err(CreatorError::Empty(context))=>summary_format!("Creator","Invalid Creator: {context}"),
|
||||
Err(CreatorError::TooLong(context))=>summary_format!("Creator","Creator is too long: {context} characters (50 characters max)"),
|
||||
Err(CreatorError::StringValue(context))=>summary_format!("Creator","Creator StringValue: {context}"),
|
||||
Ok(Ok(_))=>passed!("Creator"),
|
||||
Ok(Err(context))=>summary_format!("Creator","Invalid Creator: {context}"),
|
||||
Err(StringValueError::ObjectNotFound)=>summary!("Creator","Missing Creator StringValue".to_owned()),
|
||||
Err(StringValueError::ValueNotSet)=>summary!("Creator","Creator Value not set".to_owned()),
|
||||
Err(StringValueError::NonStringValue)=>summary!("Creator","Creator Value is not a String".to_owned()),
|
||||
};
|
||||
let game_id=match &self.game_id{
|
||||
Ok(_)=>passed!("GameID"),
|
||||
|
||||
@@ -79,15 +79,6 @@ pub enum StringValueError{
|
||||
ValueNotSet,
|
||||
NonStringValue,
|
||||
}
|
||||
impl std::fmt::Display for StringValueError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
match self{
|
||||
StringValueError::ObjectNotFound=>write!(f,"Missing StringValue"),
|
||||
StringValueError::ValueNotSet=>write!(f,"Value not set"),
|
||||
StringValueError::NonStringValue=>write!(f,"Value is not a String"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn string_value(instance:Option<&rbx_dom_weak::Instance>)->Result<&str,StringValueError>{
|
||||
let instance=instance.ok_or(StringValueError::ObjectNotFound)?;
|
||||
|
||||
@@ -29,13 +29,6 @@ const ReviewActions = {
|
||||
confirmMessage: "Are you ready to submit this for review? The model version is locked in once submitted, but you can revoke it later if needed.",
|
||||
requiresConfirmation: true
|
||||
} as ReviewAction,
|
||||
Update: {
|
||||
name: "Update Model",
|
||||
action: "trigger-submit",
|
||||
confirmTitle: "Re-submit Latest Version",
|
||||
confirmMessage: "This action is equivalent to clicking Revoke and then clicking Submit.",
|
||||
requiresConfirmation: true
|
||||
} as ReviewAction,
|
||||
AdminSubmit: {
|
||||
name: "Submit on Behalf of User",
|
||||
action: "trigger-submit",
|
||||
@@ -190,13 +183,6 @@ const ReviewButtons: React.FC<ReviewButtonsProps> = ({
|
||||
});
|
||||
}
|
||||
|
||||
if (status === Status.Submitted) {
|
||||
submitterButtons.push({
|
||||
action: ReviewActions.Update,
|
||||
color: "success",
|
||||
});
|
||||
}
|
||||
|
||||
if (StatusMatches(status, [Status.Submitted, Status.ChangesRequested])) {
|
||||
submitterButtons.push({
|
||||
action: ReviewActions.Revoke,
|
||||
@@ -243,7 +229,7 @@ const ReviewButtons: React.FC<ReviewButtonsProps> = ({
|
||||
});
|
||||
}
|
||||
|
||||
if (StatusMatches(status, [Status.Uploaded, Status.Validated, Status.AcceptedUnvalidated, Status.Submitted]) && !is_submitter) {
|
||||
if (StatusMatches(status, [Status.Validated, Status.AcceptedUnvalidated, Status.Submitted]) && !is_submitter) {
|
||||
reviewerButtons.push({
|
||||
action: ReviewActions.RequestChanges,
|
||||
color: "warning",
|
||||
|
||||
@@ -174,7 +174,7 @@ export default function ReviewerDashboardPage() {
|
||||
const [scriptPoliciesCount, setScriptPoliciesCount] = useState<number>(0);
|
||||
const [isLoadingScripts, setIsLoadingScripts] = useState(false);
|
||||
|
||||
// Fetch user roles
|
||||
// Fetch user roles
|
||||
useEffect(() => {
|
||||
// Fetch roles from API
|
||||
const controller = new AbortController();
|
||||
@@ -459,9 +459,6 @@ export default function ReviewerDashboardPage() {
|
||||
);
|
||||
const canReviewScripts = hasRole(userRoles, RolesConstants.ScriptWrite);
|
||||
|
||||
const tabIndexSubmissions = 0;
|
||||
const tabIndexMapfixes = canReviewSubmissions ? 1 : 0;
|
||||
|
||||
if (!hasAnyReviewerRole(userRoles)) {
|
||||
return (
|
||||
<Webpage>
|
||||
@@ -522,7 +519,7 @@ export default function ReviewerDashboardPage() {
|
||||
mb: 4
|
||||
}}>
|
||||
{canReviewSubmissions && (
|
||||
<Card onClick={()=>setTabValue(tabIndexSubmissions)}>
|
||||
<Card>
|
||||
<CardContent>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', gap: 2 }}>
|
||||
<AssignmentIcon sx={{ fontSize: 40, color: 'primary.main' }} />
|
||||
@@ -546,7 +543,7 @@ export default function ReviewerDashboardPage() {
|
||||
)}
|
||||
|
||||
{canReviewMapfixes && (
|
||||
<Card onClick={()=>setTabValue(tabIndexMapfixes)}>
|
||||
<Card>
|
||||
<CardContent>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', gap: 2 }}>
|
||||
<BuildIcon sx={{ fontSize: 40, color: 'secondary.main' }} />
|
||||
@@ -627,8 +624,8 @@ export default function ReviewerDashboardPage() {
|
||||
})()}
|
||||
</Box>
|
||||
}
|
||||
id={`reviewer-tab-${tabIndexSubmissions}`}
|
||||
aria-controls={`reviewer-tabpanel-${tabIndexSubmissions}`}
|
||||
id="reviewer-tab-0"
|
||||
aria-controls="reviewer-tabpanel-0"
|
||||
/>
|
||||
)}
|
||||
{canReviewMapfixes && (
|
||||
@@ -648,8 +645,8 @@ export default function ReviewerDashboardPage() {
|
||||
})()}
|
||||
</Box>
|
||||
}
|
||||
id={`reviewer-tab-${tabIndexMapfixes}`}
|
||||
aria-controls={`reviewer-tabpanel-${tabIndexMapfixes}`}
|
||||
id={`reviewer-tab-${canReviewSubmissions ? 1 : 0}`}
|
||||
aria-controls={`reviewer-tabpanel-${canReviewSubmissions ? 1 : 0}`}
|
||||
/>
|
||||
)}
|
||||
</Tabs>
|
||||
@@ -657,7 +654,7 @@ export default function ReviewerDashboardPage() {
|
||||
|
||||
{/* Submissions Tab */}
|
||||
{canReviewSubmissions && (
|
||||
<TabPanel value={tabValue} index={tabIndexSubmissions}>
|
||||
<TabPanel value={tabValue} index={0}>
|
||||
{userRoles && submissions && groupSubmissionsByStatus(submissions.Submissions, userRoles).reduce((sum, group) => sum + group.items.length, 0) === 0 ? (
|
||||
<Alert severity="success">
|
||||
No submissions currently need your review. Great job!
|
||||
@@ -743,7 +740,7 @@ export default function ReviewerDashboardPage() {
|
||||
|
||||
{/* Map Fixes Tab */}
|
||||
{canReviewMapfixes && (
|
||||
<TabPanel value={tabValue} index={tabIndexMapfixes}>
|
||||
<TabPanel value={tabValue} index={canReviewSubmissions ? 1 : 0}>
|
||||
{userRoles && mapfixes && groupMapfixesByStatus(mapfixes.Mapfixes, userRoles).reduce((sum, group) => sum + group.items.length, 0) === 0 ? (
|
||||
<Alert severity="success">
|
||||
No map fixes currently need your review. Great job!
|
||||
|
||||
Reference in New Issue
Block a user