Compare commits
153 Commits
compose-de
...
feature/ma
| Author | SHA1 | Date | |
|---|---|---|---|
| f4209ecd0a | |||
| e83c9db866 | |||
| 7bec80a2fe | |||
| 3e77edb1cc | |||
| 0b7ca534f3 | |||
| 264ce38c08 | |||
|
b1e10dc50e
|
|||
|
755616f46c
|
|||
| e41d34dd3d | |||
| f49e27e230 | |||
| d500462fc7 | |||
| ee2bc94312 | |||
| 84edc71574 | |||
| 7c5d8a2163 | |||
| 7eaa84a0ed | |||
| cf0cf9da7a | |||
| 74565e567a | |||
| ea65794255 | |||
| 58706a5687 | |||
| efeb525e19 | |||
|
5a1fe60a7b
|
|||
| 01cfe67848 | |||
| a19bc4d380 | |||
| ae006565d6 | |||
| 57bca99109 | |||
| cd09c9b18e | |||
| e48cbaff72 | |||
| 140d58b808 | |||
| ba761549b8 | |||
| 86643fef8d | |||
| 96af864c5e | |||
| 7db89fd99b | |||
| f2bb1b078d | |||
| 66878fba4e | |||
| bda99550be | |||
| 8a216c7e82 | |||
| e5277c05a1 | |||
| e4af76cfd4 | |||
| 30db1cc375 | |||
| b50c84f8cf | |||
| 7589ef7df6 | |||
| 8ab8c441b0 | |||
| a26b228ebe | |||
| 3654755540 | |||
| c2b50ffab2 | |||
| 75756917b1 | |||
| 8989c08857 | |||
| b2232f4177 | |||
| 7d1c4d2b6c | |||
| ca401d4b96 | |||
|
9ab80931bf
|
|||
|
09022e7292
|
|||
|
3400056c23
|
|||
|
57501d446f
|
|||
| 47c0fff0ec | |||
|
e6ef4e33ac
|
|||
|
aeba355d6c
|
|||
|
8ad94bcdc8
|
|||
|
66f02a2f45
|
|||
|
c6a685310e
|
|||
|
72b95ae271
|
|||
|
7c04cc5c23
|
|||
|
a3bf111b4e
|
|||
|
d82f44e9d2
|
|||
|
4c5a8c39c1
|
|||
|
4e55b1d665
|
|||
|
63d7bec3a3
|
|||
| b7c28616ad | |||
|
ce9b26378c
|
|||
|
df8f6463da
|
|||
|
6ccc56cc55
|
|||
| 89ab25dfb9 | |||
|
ffa1308e73
|
|||
|
b5a367e159
|
|||
|
6b05836a56
|
|||
|
8abee39d15
|
|||
| b0b5ff0725 | |||
|
456b62104b
|
|||
|
574a05424d
|
|||
| 0532965d37 | |||
|
51ba05df69
|
|||
|
30b594b345
|
|||
|
ab361dffd1
|
|||
|
d30a94e42d
|
|||
|
dae378a188
|
|||
|
cd912d683e
|
|||
|
f5dfd5a163
|
|||
|
18d51af7ca
|
|||
|
a45aa700d8
|
|||
|
907b6d2034
|
|||
|
a454ea01b6
|
|||
|
7f8c9210a5
|
|||
| f76f8cd136 | |||
|
55b79b8f9b
|
|||
|
1ce09e3f9b
|
|||
|
2878467cbf
|
|||
|
2639abc7c8
|
|||
|
231c11632b
|
|||
|
877f5c024f
|
|||
|
18ca6de7d3
|
|||
|
6ee8816eed
|
|||
|
de6163093f
|
|||
|
d7456d500b
|
|||
|
1a558f35cf
|
|||
|
b5b07ec1ce
|
|||
|
efd60f45df
|
|||
|
10507c62ab
|
|||
|
0d18167b03
|
|||
|
e90cc425ba
|
|||
|
76512bec0d
|
|||
|
412dadfc3e
|
|||
|
31cca0d450
|
|||
|
cfb7461c5a
|
|||
|
0cb419430a
|
|||
|
807d394646
|
|||
|
3c9d04d637
|
|||
|
94ad0ff774
|
|||
|
25f6c9e086
|
|||
|
a62a231b0a
|
|||
|
468204b299
|
|||
| 63458aee09 | |||
|
8297ed165b
|
|||
|
c98c3fe47e
|
|||
|
9c1e1e4347
|
|||
|
d37a8b9030
|
|||
|
75682a2375
|
|||
|
8f6012c7ef
|
|||
| f59979987f | |||
|
295b1d842b
|
|||
|
93147060d6
|
|||
|
fe539bf190
|
|||
|
759ac08aef
|
|||
|
34bc623ce6
|
|||
|
9999d1ff87
|
|||
|
8a0cd50b68
|
|||
| a232269d54 | |||
| a7c4ca4b49 | |||
| ca9f82a5aa | |||
| e1a2f6f075 | |||
| dad904cd86 | |||
| ad7117a69c | |||
| d566591ea6 | |||
| 424ef6238b | |||
| 0f0ab4d3e0 | |||
| 3e2d782289 | |||
| dc446c545f | |||
|
e234a87d05
|
|||
| 8ab772ea81 | |||
| 9b58b1d26a | |||
| 7689001e74 | |||
| e89abed3d5 | |||
| b792d33164 | |||
| 929b5949f0 |
41
.drone.yml
41
.drone.yml
@@ -24,7 +24,7 @@ steps:
|
||||
- staging
|
||||
|
||||
- name: build-validator
|
||||
image: clux/muslrust:1.86.0-stable
|
||||
image: clux/muslrust:1.91.0-stable
|
||||
commands:
|
||||
- make build-validator
|
||||
when:
|
||||
@@ -32,8 +32,17 @@ steps:
|
||||
- master
|
||||
- staging
|
||||
|
||||
- name: build-combobulator
|
||||
image: clux/muslrust:1.91.0-stable
|
||||
commands:
|
||||
- make build-combobulator
|
||||
when:
|
||||
branch:
|
||||
- master
|
||||
- staging
|
||||
|
||||
- name: build-frontend
|
||||
image: oven/bun:1.2.8
|
||||
image: oven/bun:1.3.3
|
||||
commands:
|
||||
- apt-get update
|
||||
- apt-get install make
|
||||
@@ -112,6 +121,29 @@ steps:
|
||||
event:
|
||||
- push
|
||||
|
||||
- name: image-combobulator
|
||||
image: plugins/docker
|
||||
settings:
|
||||
registry: registry.itzana.me
|
||||
repo: registry.itzana.me/strafesnet/maptest-combobulator
|
||||
tags:
|
||||
- ${DRONE_BRANCH}-${DRONE_BUILD_NUMBER}
|
||||
- ${DRONE_BRANCH}
|
||||
username:
|
||||
from_secret: REGISTRY_USER
|
||||
password:
|
||||
from_secret: REGISTRY_PASS
|
||||
dockerfile: combobulator/Containerfile
|
||||
context: .
|
||||
depends_on:
|
||||
- build-combobulator
|
||||
when:
|
||||
branch:
|
||||
- master
|
||||
- staging
|
||||
event:
|
||||
- push
|
||||
|
||||
- name: deploy
|
||||
image: argoproj/argocd:latest
|
||||
commands:
|
||||
@@ -119,6 +151,7 @@ steps:
|
||||
- argocd app --grpc-web set ${DRONE_BRANCH}-maps-service --kustomize-image registry.itzana.me/strafesnet/maptest-api:${DRONE_BRANCH}-${DRONE_BUILD_NUMBER}
|
||||
- argocd app --grpc-web set ${DRONE_BRANCH}-maps-service --kustomize-image registry.itzana.me/strafesnet/maptest-frontend:${DRONE_BRANCH}-${DRONE_BUILD_NUMBER}
|
||||
- argocd app --grpc-web set ${DRONE_BRANCH}-maps-service --kustomize-image registry.itzana.me/strafesnet/maptest-validator:${DRONE_BRANCH}-${DRONE_BUILD_NUMBER}
|
||||
- argocd app --grpc-web set ${DRONE_BRANCH}-maps-service --kustomize-image registry.itzana.me/strafesnet/maptest-combobulator:${DRONE_BRANCH}-${DRONE_BUILD_NUMBER}
|
||||
environment:
|
||||
USERNAME:
|
||||
from_secret: ARGO_USER
|
||||
@@ -128,6 +161,7 @@ steps:
|
||||
- image-backend
|
||||
- image-frontend
|
||||
- image-validator
|
||||
- image-combobulator
|
||||
when:
|
||||
branch:
|
||||
- master
|
||||
@@ -143,12 +177,13 @@ steps:
|
||||
depends_on:
|
||||
- build-backend
|
||||
- build-validator
|
||||
- build-combobulator
|
||||
- build-frontend
|
||||
when:
|
||||
event:
|
||||
- pull_request
|
||||
---
|
||||
kind: signature
|
||||
hmac: cc7f2f8dac4285b5fa1df163bd92115f1a51a92050687cd08169e17803a2de4c
|
||||
hmac: 2d2a3b50b5864bd79efacf31f71b5a409a1782f6dbfb4669a418f577cc5517bd
|
||||
|
||||
...
|
||||
|
||||
3922
Cargo.lock
generated
3922
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,7 @@
|
||||
[workspace]
|
||||
members = [
|
||||
"combobulator",
|
||||
"validation",
|
||||
"validation/api",
|
||||
"submissions-api-rs",
|
||||
]
|
||||
resolver = "2"
|
||||
|
||||
16
Makefile
16
Makefile
@@ -9,12 +9,15 @@ build-backend:
|
||||
build-validator:
|
||||
cargo build --release --target x86_64-unknown-linux-musl --bin maps-validation
|
||||
|
||||
build-combobulator:
|
||||
cargo build --release --target x86_64-unknown-linux-musl --bin maps-combobulator
|
||||
|
||||
build-frontend:
|
||||
rm -rf web/build
|
||||
cd web && bun install --frozen-lockfile
|
||||
cd web && bun run build
|
||||
|
||||
build: build-backend build-validator build-frontend
|
||||
build: build-backend build-validator build-combobulator build-frontend
|
||||
|
||||
# image
|
||||
image-backend:
|
||||
@@ -23,6 +26,9 @@ image-backend:
|
||||
image-validator:
|
||||
docker build . -f validation/Containerfile -t maptest-validator
|
||||
|
||||
image-combobulator:
|
||||
docker build . -f combobulator/Containerfile -t maptest-combobulator
|
||||
|
||||
image-frontend:
|
||||
docker build web -f web/Containerfile -t maptest-frontend
|
||||
|
||||
@@ -33,10 +39,12 @@ docker-backend:
|
||||
docker-validator:
|
||||
make build-validator
|
||||
make image-validator
|
||||
docker-combobulator:
|
||||
make build-combobulator
|
||||
make image-combobulator
|
||||
docker-frontend:
|
||||
make build-frontend
|
||||
make image-frontend
|
||||
|
||||
docker: docker-backend docker-validator docker-frontend
|
||||
docker: docker-backend docker-validator docker-combobulator docker-frontend
|
||||
|
||||
.PHONY: clean build-backend build-validator build-frontend build image-backend image-validator image-frontend docker-backend docker-validator docker-frontend docker
|
||||
.PHONY: clean build-backend build-validator build-combobulator build-frontend build image-backend image-validator image-combobulator image-frontend docker-backend docker-validator docker-combobulator docker-frontend docker
|
||||
|
||||
10
README.md
10
README.md
@@ -13,11 +13,11 @@ Prerequisite: golang installed
|
||||
|
||||
1. Run `go generate` to ensure the generated API is up-to-date. This project uses [ogen](https://github.com/ogen-go/ogen).
|
||||
```bash
|
||||
go generate -run "go run github.com/ogen-go/ogen/cmd/ogen@latest --target api --clean openapi.yaml"
|
||||
go generate
|
||||
```
|
||||
2. Build the project.
|
||||
```bash
|
||||
go build git.itzana.me/strafesnet/maps-service
|
||||
make build-backend
|
||||
```
|
||||
|
||||
By default, the project opens at `localhost:8080`.
|
||||
@@ -47,14 +47,16 @@ AUTH_HOST="http://localhost:8083/"
|
||||
|
||||
Prerequisite: rust installed
|
||||
|
||||
1. `cd validation`
|
||||
2. `cargo run --release`
|
||||
1. `cargo run --release -p maps-validation`
|
||||
|
||||
Environment Variables:
|
||||
- ROBLOX_GROUP_ID
|
||||
- RBXCOOKIE
|
||||
- RBX_API_KEY
|
||||
- API_HOST_INTERNAL
|
||||
- NATS_HOST
|
||||
- LOAD_ASSET_VERSION_PLACE_ID
|
||||
- LOAD_ASSET_VERSION_UNIVERSE_ID
|
||||
|
||||
#### License
|
||||
|
||||
|
||||
@@ -25,6 +25,7 @@ func main() {
|
||||
app := cmds.NewApp()
|
||||
app.Commands = []*cli.Command{
|
||||
cmds.NewServeCommand(),
|
||||
cmds.NewApiCommand(),
|
||||
}
|
||||
|
||||
if err := app.Run(os.Args); err != nil {
|
||||
|
||||
15
combobulator/Cargo.toml
Normal file
15
combobulator/Cargo.toml
Normal file
@@ -0,0 +1,15 @@
|
||||
[package]
|
||||
name = "maps-combobulator"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
async-nats = "0.45.0"
|
||||
aws-config = { version = "1", features = ["behavior-version-latest"] }
|
||||
aws-sdk-s3 = "1"
|
||||
map-tool = { version = "2.0.0", registry = "strafesnet" }
|
||||
rbx_asset = { version = "0.5.0", features = ["gzip", "rustls-tls"], default-features = false, registry = "strafesnet" }
|
||||
serde = { version = "1.0.215", features = ["derive"] }
|
||||
serde_json = "1.0.133"
|
||||
tokio = { version = "1.41.1", features = ["macros", "rt-multi-thread", "signal"] }
|
||||
tokio-stream = "0.1"
|
||||
3
combobulator/Containerfile
Normal file
3
combobulator/Containerfile
Normal file
@@ -0,0 +1,3 @@
|
||||
FROM alpine:3.21 AS runtime
|
||||
COPY /target/x86_64-unknown-linux-musl/release/maps-combobulator /
|
||||
ENTRYPOINT ["/maps-combobulator"]
|
||||
169
combobulator/src/main.rs
Normal file
169
combobulator/src/main.rs
Normal file
@@ -0,0 +1,169 @@
|
||||
use tokio_stream::StreamExt;
|
||||
|
||||
mod nats_types;
|
||||
mod process;
|
||||
mod s3;
|
||||
|
||||
const SUBJECT_MAPFIX_RELEASE:&str="maptest.mapfixes.release";
|
||||
const SUBJECT_SUBMISSION_BATCHRELEASE:&str="maptest.submissions.batchrelease";
|
||||
const SUBJECT_SUBMISSION_RELEASE:&str="maptest.combobulator.submissions.release";
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum StartupError{
|
||||
NatsConnect(async_nats::ConnectError),
|
||||
NatsGetStream(async_nats::jetstream::context::GetStreamError),
|
||||
NatsConsumer(async_nats::jetstream::stream::ConsumerError),
|
||||
NatsConsumerUpdate(async_nats::jetstream::stream::ConsumerUpdateError),
|
||||
NatsStream(async_nats::jetstream::consumer::StreamError),
|
||||
}
|
||||
impl std::fmt::Display for StartupError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for StartupError{}
|
||||
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
enum HandleMessageError{
|
||||
Json(serde_json::Error),
|
||||
UnknownSubject(String),
|
||||
Process(process::Error),
|
||||
Ack(async_nats::Error),
|
||||
Publish(async_nats::jetstream::context::PublishError),
|
||||
}
|
||||
impl std::fmt::Display for HandleMessageError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for HandleMessageError{}
|
||||
|
||||
fn from_slice<'a,T:serde::de::Deserialize<'a>>(slice:&'a [u8])->Result<T,HandleMessageError>{
|
||||
serde_json::from_slice(slice).map_err(HandleMessageError::Json)
|
||||
}
|
||||
|
||||
async fn handle_message(
|
||||
processor:&process::Processor,
|
||||
jetstream:&async_nats::jetstream::Context,
|
||||
message:async_nats::jetstream::Message,
|
||||
)->Result<(),HandleMessageError>{
|
||||
match message.subject.as_str(){
|
||||
SUBJECT_MAPFIX_RELEASE=>{
|
||||
let request:nats_types::ReleaseMapfixRequest=from_slice(&message.payload)?;
|
||||
processor.handle_mapfix_release(request).await.map_err(HandleMessageError::Process)?;
|
||||
message.ack().await.map_err(HandleMessageError::Ack)?;
|
||||
},
|
||||
SUBJECT_SUBMISSION_BATCHRELEASE=>{
|
||||
// split batch into individual messages and republish
|
||||
let batch:nats_types::ReleaseSubmissionsBatchRequest=from_slice(&message.payload)?;
|
||||
println!("[combobulator] Splitting batch release (operation {}, {} submissions)",
|
||||
batch.OperationID,batch.Submissions.len());
|
||||
for submission in batch.Submissions{
|
||||
let payload=serde_json::to_vec(&submission).map_err(HandleMessageError::Json)?;
|
||||
jetstream.publish(SUBJECT_SUBMISSION_RELEASE,payload.into())
|
||||
.await.map_err(HandleMessageError::Publish)?;
|
||||
println!("[combobulator] Published individual release for submission {}",submission.SubmissionID);
|
||||
}
|
||||
// ack the batch now that all individual messages are queued
|
||||
message.ack().await.map_err(HandleMessageError::Ack)?;
|
||||
},
|
||||
SUBJECT_SUBMISSION_RELEASE=>{
|
||||
let request:nats_types::ReleaseSubmissionRequest=from_slice(&message.payload)?;
|
||||
processor.handle_submission_release(request).await.map_err(HandleMessageError::Process)?;
|
||||
message.ack().await.map_err(HandleMessageError::Ack)?;
|
||||
},
|
||||
other=>return Err(HandleMessageError::UnknownSubject(other.to_owned())),
|
||||
}
|
||||
|
||||
println!("[combobulator] Message processed and acked");
|
||||
Ok(())
|
||||
}S
|
||||
|
||||
#[tokio::main]
|
||||
async fn main()->Result<(),StartupError>{
|
||||
// roblox cloud api for downloading models
|
||||
let api_key=std::env::var("RBX_API_KEY").expect("RBX_API_KEY env required");
|
||||
let cloud_context=rbx_asset::cloud::Context::new(rbx_asset::cloud::ApiKey::new(api_key));
|
||||
|
||||
// roblox cookie api for downloading assets (textures, meshes, unions)
|
||||
let cookie=std::env::var("RBXCOOKIE").expect("RBXCOOKIE env required");
|
||||
let cookie_context=rbx_asset::cookie::Context::new(rbx_asset::cookie::Cookie::new(cookie));
|
||||
|
||||
// s3
|
||||
let s3_bucket=std::env::var("S3_BUCKET").expect("S3_BUCKET env required");
|
||||
let s3_config=aws_config::load_defaults(aws_config::BehaviorVersion::latest()).await;
|
||||
let s3_client=aws_sdk_s3::Client::new(&s3_config);
|
||||
let s3_cache=s3::S3Cache::new(s3_client,s3_bucket);
|
||||
|
||||
let processor=process::Processor{
|
||||
cloud_context,
|
||||
cookie_context,
|
||||
s3:s3_cache,
|
||||
};
|
||||
|
||||
// nats
|
||||
let nats_host=std::env::var("NATS_HOST").expect("NATS_HOST env required");
|
||||
|
||||
const STREAM_NAME:&str="maptest";
|
||||
const DURABLE_NAME:&str="combobulator";
|
||||
|
||||
let filter_subjects=vec![
|
||||
SUBJECT_MAPFIX_RELEASE.to_owned(),
|
||||
SUBJECT_SUBMISSION_BATCHRELEASE.to_owned(),
|
||||
SUBJECT_SUBMISSION_RELEASE.to_owned(),
|
||||
];
|
||||
|
||||
let nats_config=async_nats::jetstream::consumer::pull::Config{
|
||||
name:Some(DURABLE_NAME.to_owned()),
|
||||
durable_name:Some(DURABLE_NAME.to_owned()),
|
||||
filter_subjects:filter_subjects.clone(),
|
||||
ack_wait:std::time::Duration::from_secs(900), // 15 minutes for processing
|
||||
max_deliver:5, // retry up to 5 times
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let nasty=async_nats::connect(nats_host).await.map_err(StartupError::NatsConnect)?;
|
||||
let jetstream=async_nats::jetstream::new(nasty);
|
||||
let stream=jetstream.get_stream(STREAM_NAME).await.map_err(StartupError::NatsGetStream)?;
|
||||
let consumer=stream.get_or_create_consumer(DURABLE_NAME,nats_config.clone()).await.map_err(StartupError::NatsConsumer)?;
|
||||
|
||||
// update consumer config if filter subjects changed
|
||||
if consumer.cached_info().config.filter_subjects!=filter_subjects{
|
||||
stream.update_consumer(nats_config).await.map_err(StartupError::NatsConsumerUpdate)?;
|
||||
}
|
||||
|
||||
let mut messages=consumer.messages().await.map_err(StartupError::NatsStream)?;
|
||||
|
||||
// SIGTERM graceful shutdown
|
||||
let mut sig_term=tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate())
|
||||
.expect("Failed to create SIGTERM signal listener");
|
||||
|
||||
println!("[combobulator] Started, waiting for messages...");
|
||||
|
||||
// sequential processing loop - one message at a time
|
||||
let main_loop=async{
|
||||
while let Some(message_result)=messages.next().await{
|
||||
match message_result{
|
||||
Ok(message)=>{
|
||||
match handle_message(&processor,&jetstream,message).await{
|
||||
Ok(())=>println!("[combobulator] Success"),
|
||||
Err(e)=>println!("[combobulator] Error: {e}"),
|
||||
}
|
||||
},
|
||||
Err(e)=>println!("[combobulator] Message stream error: {e}"),
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
tokio::select!{
|
||||
_=sig_term.recv()=>{
|
||||
println!("[combobulator] Received SIGTERM, shutting down");
|
||||
},
|
||||
_=main_loop=>{
|
||||
println!("[combobulator] Message stream ended");
|
||||
},
|
||||
};
|
||||
|
||||
Ok(())
|
||||
}
|
||||
29
combobulator/src/nats_types.rs
Normal file
29
combobulator/src/nats_types.rs
Normal file
@@ -0,0 +1,29 @@
|
||||
#[expect(nonstandard_style,dead_code)]
|
||||
#[derive(serde::Deserialize)]
|
||||
pub struct ReleaseMapfixRequest{
|
||||
pub MapfixID:u64,
|
||||
pub ModelID:u64,
|
||||
pub ModelVersion:u64,
|
||||
pub TargetAssetID:u64,
|
||||
}
|
||||
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(serde::Deserialize,serde::Serialize)]
|
||||
pub struct ReleaseSubmissionRequest{
|
||||
pub SubmissionID:u64,
|
||||
pub ReleaseDate:i64,
|
||||
pub ModelID:u64,
|
||||
pub ModelVersion:u64,
|
||||
pub UploadedAssetID:u64,
|
||||
pub DisplayName:String,
|
||||
pub Creator:String,
|
||||
pub GameID:u32,
|
||||
pub Submitter:u64,
|
||||
}
|
||||
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(serde::Deserialize)]
|
||||
pub struct ReleaseSubmissionsBatchRequest{
|
||||
pub Submissions:Vec<ReleaseSubmissionRequest>,
|
||||
pub OperationID:u32,
|
||||
}
|
||||
144
combobulator/src/process.rs
Normal file
144
combobulator/src/process.rs
Normal file
@@ -0,0 +1,144 @@
|
||||
use crate::nats_types::ReleaseMapfixRequest;
|
||||
use crate::s3::S3Cache;
|
||||
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
Download(rbx_asset::cloud::GetError),
|
||||
NonFreeModel,
|
||||
GetAssets(map_tool::roblox::UniqueAssetError),
|
||||
DownloadAsset(map_tool::roblox::DownloadAssetError),
|
||||
ConvertTexture(map_tool::roblox::ConvertTextureError),
|
||||
ConvertSnf(map_tool::roblox::ConvertError),
|
||||
S3Get(crate::s3::GetError),
|
||||
S3Put(crate::s3::PutError),
|
||||
}
|
||||
impl std::fmt::Display for Error{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for Error{}
|
||||
|
||||
pub struct Processor{
|
||||
pub cloud_context:rbx_asset::cloud::Context,
|
||||
pub cookie_context:rbx_asset::cookie::Context,
|
||||
pub s3:S3Cache,
|
||||
}
|
||||
|
||||
impl Processor{
|
||||
/// Download a model version from Roblox cloud API.
|
||||
async fn download_model(&self,model_id:u64,model_version:u64)->Result<Vec<u8>,Error>{
|
||||
let location=self.cloud_context.get_asset_version_location(
|
||||
rbx_asset::cloud::GetAssetVersionRequest{
|
||||
asset_id:model_id,
|
||||
version:model_version,
|
||||
}
|
||||
).await.map_err(Error::Download)?;
|
||||
|
||||
let location=location.location.ok_or(Error::NonFreeModel)?;
|
||||
|
||||
let maybe_gzip=self.cloud_context.get_asset(&location).await.map_err(Error::Download)?;
|
||||
|
||||
Ok(maybe_gzip.into_inner().to_vec())
|
||||
}
|
||||
|
||||
/// Process a single model: extract assets, cache to S3, build SNF.
|
||||
async fn process_model(&self,model_id:u64,model_version:u64)->Result<(),Error>{
|
||||
println!("[combobulator] Downloading model {model_id} v{model_version}");
|
||||
let rbxl_bytes=self.download_model(model_id,model_version).await?;
|
||||
|
||||
// extract unique assets from the file
|
||||
let assets=map_tool::roblox::get_unique_assets_from_file(&rbxl_bytes)
|
||||
.map_err(Error::GetAssets)?;
|
||||
|
||||
// process textures: download, cache, convert to DDS
|
||||
for id in &assets.textures{
|
||||
let asset_id=id.0;
|
||||
let dds_key=S3Cache::texture_dds_key(asset_id);
|
||||
|
||||
// skip if DDS already cached
|
||||
if self.s3.get(&dds_key).await.map_err(Error::S3Get)?.is_some(){
|
||||
println!("[combobulator] Texture {asset_id} already cached, skipping");
|
||||
continue;
|
||||
}
|
||||
|
||||
// check raw cache, download if missing
|
||||
let raw_key=S3Cache::texture_raw_key(asset_id);
|
||||
let raw_data=match self.s3.get(&raw_key).await.map_err(Error::S3Get)?{
|
||||
Some(cached)=>cached,
|
||||
None=>{
|
||||
println!("[combobulator] Downloading texture {asset_id}");
|
||||
let data=map_tool::roblox::download_asset(&self.cookie_context,asset_id)
|
||||
.await.map_err(Error::DownloadAsset)?;
|
||||
self.s3.put(&raw_key,data.clone()).await.map_err(Error::S3Put)?;
|
||||
data
|
||||
},
|
||||
};
|
||||
|
||||
// convert to DDS and upload
|
||||
let dds=map_tool::roblox::convert_texture_to_dds(&raw_data)
|
||||
.map_err(Error::ConvertTexture)?;
|
||||
self.s3.put(&dds_key,dds).await.map_err(Error::S3Put)?;
|
||||
println!("[combobulator] Texture {asset_id} processed");
|
||||
}
|
||||
|
||||
// process meshes
|
||||
for id in &assets.meshes{
|
||||
let asset_id=id.0;
|
||||
let mesh_key=S3Cache::mesh_key(asset_id);
|
||||
|
||||
if self.s3.get(&mesh_key).await.map_err(Error::S3Get)?.is_some(){
|
||||
println!("[combobulator] Mesh {asset_id} already cached, skipping");
|
||||
continue;
|
||||
}
|
||||
|
||||
println!("[combobulator] Downloading mesh {asset_id}");
|
||||
let data=map_tool::roblox::download_asset(&self.cookie_context,asset_id)
|
||||
.await.map_err(Error::DownloadAsset)?;
|
||||
self.s3.put(&mesh_key,data).await.map_err(Error::S3Put)?;
|
||||
println!("[combobulator] Mesh {asset_id} processed");
|
||||
}
|
||||
|
||||
// process unions
|
||||
for id in &assets.unions{
|
||||
let asset_id=id.0;
|
||||
let union_key=S3Cache::union_key(asset_id);
|
||||
|
||||
if self.s3.get(&union_key).await.map_err(Error::S3Get)?.is_some(){
|
||||
println!("[combobulator] Union {asset_id} already cached, skipping");
|
||||
continue;
|
||||
}
|
||||
|
||||
println!("[combobulator] Downloading union {asset_id}");
|
||||
let data=map_tool::roblox::download_asset(&self.cookie_context,asset_id)
|
||||
.await.map_err(Error::DownloadAsset)?;
|
||||
self.s3.put(&union_key,data).await.map_err(Error::S3Put)?;
|
||||
println!("[combobulator] Union {asset_id} processed");
|
||||
}
|
||||
|
||||
// convert to SNF and upload
|
||||
println!("[combobulator] Converting to SNF");
|
||||
let output=map_tool::roblox::convert_to_snf(&rbxl_bytes)
|
||||
.map_err(Error::ConvertSnf)?;
|
||||
let snf_key=S3Cache::snf_key(model_id,model_version);
|
||||
self.s3.put(&snf_key,output.snf).await.map_err(Error::S3Put)?;
|
||||
println!("[combobulator] SNF uploaded to {snf_key}");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Handle a mapfix release message.
|
||||
pub async fn handle_mapfix_release(&self,request:ReleaseMapfixRequest)->Result<(),Error>{
|
||||
println!("[combobulator] Processing mapfix {} (model {} v{})",
|
||||
request.MapfixID,request.ModelID,request.ModelVersion);
|
||||
self.process_model(request.ModelID,request.ModelVersion).await
|
||||
}
|
||||
|
||||
/// Handle an individual submission release message.
|
||||
pub async fn handle_submission_release(&self,request:crate::nats_types::ReleaseSubmissionRequest)->Result<(),Error>{
|
||||
println!("[combobulator] Processing submission {} (model {} v{})",
|
||||
request.SubmissionID,request.ModelID,request.ModelVersion);
|
||||
self.process_model(request.ModelID,request.ModelVersion).await
|
||||
}
|
||||
}
|
||||
96
combobulator/src/s3.rs
Normal file
96
combobulator/src/s3.rs
Normal file
@@ -0,0 +1,96 @@
|
||||
use aws_sdk_s3::Client;
|
||||
use aws_sdk_s3::primitives::ByteStream;
|
||||
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum GetError{
|
||||
Get(aws_sdk_s3::error::SdkError<aws_sdk_s3::operation::get_object::GetObjectError>),
|
||||
Collect(aws_sdk_s3::primitives::ByteStreamError),
|
||||
}
|
||||
impl std::fmt::Display for GetError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for GetError{}
|
||||
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum PutError{
|
||||
Put(aws_sdk_s3::error::SdkError<aws_sdk_s3::operation::put_object::PutObjectError>),
|
||||
}
|
||||
impl std::fmt::Display for PutError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for PutError{}
|
||||
|
||||
pub struct S3Cache{
|
||||
client:Client,
|
||||
bucket:String,
|
||||
}
|
||||
|
||||
impl S3Cache{
|
||||
pub fn new(client:Client,bucket:String)->Self{
|
||||
Self{client,bucket}
|
||||
}
|
||||
|
||||
/// Try to get a cached object. Returns None if the key doesn't exist.
|
||||
pub async fn get(&self,key:&str)->Result<Option<Vec<u8>>,GetError>{
|
||||
match self.client.get_object()
|
||||
.bucket(&self.bucket)
|
||||
.key(key)
|
||||
.send()
|
||||
.await
|
||||
{
|
||||
Ok(output)=>{
|
||||
let bytes=output.body.collect().await.map_err(GetError::Collect)?;
|
||||
Ok(Some(bytes.to_vec()))
|
||||
},
|
||||
Err(e)=>{
|
||||
// check if it's a NoSuchKey error
|
||||
if let aws_sdk_s3::error::SdkError::ServiceError(ref service_err)=e{
|
||||
if service_err.err().is_no_such_key(){
|
||||
return Ok(None);
|
||||
}
|
||||
}
|
||||
Err(GetError::Get(e))
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/// Put an object into S3.
|
||||
pub async fn put(&self,key:&str,data:Vec<u8>)->Result<(),PutError>{
|
||||
self.client.put_object()
|
||||
.bucket(&self.bucket)
|
||||
.key(key)
|
||||
.body(ByteStream::from(data))
|
||||
.send()
|
||||
.await
|
||||
.map_err(PutError::Put)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// S3 key helpers
|
||||
|
||||
pub fn texture_raw_key(asset_id:u64)->String{
|
||||
format!("assets/textures/{asset_id}.raw")
|
||||
}
|
||||
|
||||
pub fn texture_dds_key(asset_id:u64)->String{
|
||||
format!("assets/textures/{asset_id}.dds")
|
||||
}
|
||||
|
||||
pub fn mesh_key(asset_id:u64)->String{
|
||||
format!("assets/meshes/{asset_id}")
|
||||
}
|
||||
|
||||
pub fn union_key(asset_id:u64)->String{
|
||||
format!("assets/unions/{asset_id}")
|
||||
}
|
||||
|
||||
pub fn snf_key(model_id:u64,model_version:u64)->String{
|
||||
format!("maps/{model_id}/v{model_version}/map.snfm")
|
||||
}
|
||||
}
|
||||
10
compose.yaml
10
compose.yaml
@@ -34,7 +34,7 @@ services:
|
||||
"--data-rpc-host","dataservice:9000",
|
||||
]
|
||||
env_file:
|
||||
- ../auth-compose/strafesnet_staging.env
|
||||
- /home/quat/auth-compose/strafesnet_staging.env
|
||||
depends_on:
|
||||
- authrpc
|
||||
- nats
|
||||
@@ -59,11 +59,13 @@ services:
|
||||
maptest-validator
|
||||
container_name: validation
|
||||
env_file:
|
||||
- ../auth-compose/strafesnet_staging.env
|
||||
- /home/quat/auth-compose/strafesnet_staging.env
|
||||
environment:
|
||||
- ROBLOX_GROUP_ID=17032139 # "None" is special case string value
|
||||
- API_HOST_INTERNAL=http://submissions:8083/v1
|
||||
- NATS_HOST=nats:4222
|
||||
- LOAD_ASSET_VERSION_PLACE_ID=14001440964
|
||||
- LOAD_ASSET_VERSION_UNIVERSE_ID=4850603885
|
||||
depends_on:
|
||||
- nats
|
||||
# note: this races the submissions which creates a nats stream
|
||||
@@ -103,7 +105,7 @@ services:
|
||||
- REDIS_ADDR=authredis:6379
|
||||
- RBX_GROUP_ID=17032139
|
||||
env_file:
|
||||
- ../auth-compose/auth-service.env
|
||||
- /home/quat/auth-compose/auth-service.env
|
||||
depends_on:
|
||||
- authredis
|
||||
networks:
|
||||
@@ -117,7 +119,7 @@ services:
|
||||
environment:
|
||||
- REDIS_ADDR=authredis:6379
|
||||
env_file:
|
||||
- ../auth-compose/auth-service.env
|
||||
- /home/quat/auth-compose/auth-service.env
|
||||
depends_on:
|
||||
- authredis
|
||||
networks:
|
||||
|
||||
@@ -230,7 +230,7 @@ var SwaggerInfo = &swag.Spec{
|
||||
BasePath: "/public-api/v1",
|
||||
Schemes: []string{},
|
||||
Title: "StrafesNET Maps API",
|
||||
Description: "Obtain an api key at https://dev.strafes.net\nRequires Data:Read permission",
|
||||
Description: "Obtain an api key at https://dev.strafes.net\nRequires Maps:Read permission",
|
||||
InfoInstanceName: "swagger",
|
||||
SwaggerTemplate: docTemplate,
|
||||
LeftDelim: "{{",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"swagger": "2.0",
|
||||
"info": {
|
||||
"description": "Obtain an api key at https://dev.strafes.net\nRequires Data:Read permission",
|
||||
"description": "Obtain an api key at https://dev.strafes.net\nRequires Maps:Read permission",
|
||||
"title": "StrafesNET Maps API",
|
||||
"contact": {},
|
||||
"version": "1.0"
|
||||
|
||||
@@ -64,7 +64,7 @@ info:
|
||||
contact: {}
|
||||
description: |-
|
||||
Obtain an api key at https://dev.strafes.net
|
||||
Requires Data:Read permission
|
||||
Requires Maps:Read permission
|
||||
title: StrafesNET Maps API
|
||||
version: "1.0"
|
||||
paths:
|
||||
|
||||
47
go.mod
47
go.mod
@@ -6,22 +6,23 @@ toolchain go1.24.5
|
||||
|
||||
require (
|
||||
git.itzana.me/StrafesNET/dev-service v0.0.0-20250628052121-92af8193b5ed
|
||||
git.itzana.me/strafesnet/go-grpc v0.0.0-20250807005013-301d35b914ef
|
||||
git.itzana.me/strafesnet/go-grpc v0.0.0-20251228204118-c20dbb42afec
|
||||
git.itzana.me/strafesnet/utils v0.0.0-20220716194944-d8ca164052f9
|
||||
github.com/dchest/siphash v1.2.3
|
||||
github.com/gin-gonic/gin v1.10.1
|
||||
github.com/go-faster/errors v0.7.1
|
||||
github.com/go-faster/jx v1.1.0
|
||||
github.com/go-faster/jx v1.2.0
|
||||
github.com/nats-io/nats.go v1.37.0
|
||||
github.com/ogen-go/ogen v1.2.1
|
||||
github.com/ogen-go/ogen v1.18.0
|
||||
github.com/redis/go-redis/v9 v9.10.0
|
||||
github.com/sirupsen/logrus v1.9.3
|
||||
github.com/swaggo/files v1.0.1
|
||||
github.com/swaggo/gin-swagger v1.6.0
|
||||
github.com/swaggo/swag v1.16.6
|
||||
github.com/urfave/cli/v2 v2.27.6
|
||||
go.opentelemetry.io/otel v1.32.0
|
||||
go.opentelemetry.io/otel/metric v1.32.0
|
||||
go.opentelemetry.io/otel/trace v1.32.0
|
||||
go.opentelemetry.io/otel v1.39.0
|
||||
go.opentelemetry.io/otel/metric v1.39.0
|
||||
go.opentelemetry.io/otel/trace v1.39.0
|
||||
google.golang.org/grpc v1.48.0
|
||||
gorm.io/driver/postgres v1.6.0
|
||||
gorm.io/gorm v1.25.12
|
||||
@@ -33,9 +34,11 @@ require (
|
||||
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect
|
||||
github.com/bytedance/sonic v1.11.6 // indirect
|
||||
github.com/bytedance/sonic/loader v0.1.1 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
github.com/cloudwego/base64x v0.1.4 // indirect
|
||||
github.com/cloudwego/iasm v0.2.0 // indirect
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.3 // indirect
|
||||
github.com/gin-contrib/sse v0.1.0 // indirect
|
||||
github.com/go-openapi/jsonpointer v0.19.5 // indirect
|
||||
@@ -55,7 +58,7 @@ require (
|
||||
github.com/jinzhu/now v1.1.5 // indirect
|
||||
github.com/josharian/intern v1.0.0 // indirect
|
||||
github.com/json-iterator/go v1.1.12 // indirect
|
||||
github.com/klauspost/compress v1.17.6 // indirect
|
||||
github.com/klauspost/compress v1.18.1 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.2.7 // indirect
|
||||
github.com/leodido/go-urn v1.4.0 // indirect
|
||||
github.com/mailru/easyjson v0.7.6 // indirect
|
||||
@@ -65,36 +68,38 @@ require (
|
||||
github.com/nats-io/nuid v1.0.1 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.2.2 // indirect
|
||||
github.com/russross/blackfriday/v2 v2.1.0 // indirect
|
||||
github.com/shopspring/decimal v1.4.0 // indirect
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
||||
github.com/ugorji/go/codec v1.2.12 // indirect
|
||||
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
|
||||
golang.org/x/arch v0.8.0 // indirect
|
||||
golang.org/x/crypto v0.32.0 // indirect
|
||||
golang.org/x/mod v0.17.0 // indirect
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d // indirect
|
||||
golang.org/x/crypto v0.46.0 // indirect
|
||||
golang.org/x/mod v0.31.0 // indirect
|
||||
golang.org/x/tools v0.40.0 // indirect
|
||||
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013 // indirect
|
||||
google.golang.org/protobuf v1.34.1 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/dlclark/regexp2 v1.11.0 // indirect
|
||||
github.com/fatih/color v1.17.0 // indirect
|
||||
github.com/dlclark/regexp2 v1.11.5 // indirect
|
||||
github.com/fatih/color v1.18.0 // indirect
|
||||
github.com/ghodss/yaml v1.0.0 // indirect
|
||||
github.com/go-faster/yaml v0.4.6 // indirect
|
||||
github.com/go-logr/logr v1.4.2 // indirect
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
// github.com/golang/protobuf v1.5.4 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/mattn/go-colorable v0.1.13 // indirect
|
||||
github.com/mattn/go-colorable v0.1.14 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/segmentio/asm v1.2.0 // indirect
|
||||
github.com/segmentio/asm v1.2.1 // indirect
|
||||
go.uber.org/multierr v1.11.0 // indirect
|
||||
go.uber.org/zap v1.27.0 // indirect
|
||||
golang.org/x/exp v0.0.0-20240531132922-fd00a4e0eefc // indirect
|
||||
golang.org/x/net v0.34.0 // indirect
|
||||
golang.org/x/sync v0.12.0 // indirect
|
||||
golang.org/x/sys v0.29.0 // indirect
|
||||
golang.org/x/text v0.23.0 // indirect
|
||||
go.uber.org/zap v1.27.1 // indirect
|
||||
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93 // indirect
|
||||
golang.org/x/net v0.48.0 // indirect
|
||||
golang.org/x/sync v0.19.0 // indirect
|
||||
golang.org/x/sys v0.39.0 // indirect
|
||||
golang.org/x/text v0.32.0 // indirect
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
)
|
||||
|
||||
107
go.sum
107
go.sum
@@ -2,8 +2,8 @@ cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMT
|
||||
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
git.itzana.me/StrafesNET/dev-service v0.0.0-20250628052121-92af8193b5ed h1:eGWIQx2AOrSsLC2dieuSs8MCliRE60tvpZnmxsTBtKc=
|
||||
git.itzana.me/StrafesNET/dev-service v0.0.0-20250628052121-92af8193b5ed/go.mod h1:KJal0K++M6HEzSry6JJ2iDPZtOQn5zSstNlDbU3X4Jg=
|
||||
git.itzana.me/strafesnet/go-grpc v0.0.0-20250807005013-301d35b914ef h1:SJi4V4+xzScFnbMRN1gkZxcqR1xKfiT7CaXanLltEzw=
|
||||
git.itzana.me/strafesnet/go-grpc v0.0.0-20250807005013-301d35b914ef/go.mod h1:X7XTRUScRkBWq8q8bplbeso105RPDlnY7J6Wy1IwBMs=
|
||||
git.itzana.me/strafesnet/go-grpc v0.0.0-20251228204118-c20dbb42afec h1:JSar9If1kzb02+Erp+zmSqHKWPPP2NqMQVK15pRmkLE=
|
||||
git.itzana.me/strafesnet/go-grpc v0.0.0-20251228204118-c20dbb42afec/go.mod h1:X7XTRUScRkBWq8q8bplbeso105RPDlnY7J6Wy1IwBMs=
|
||||
git.itzana.me/strafesnet/utils v0.0.0-20220716194944-d8ca164052f9 h1:7lU6jyR7S7Rhh1dnUp7GyIRHUTBXZagw8F4n4hOyxLw=
|
||||
git.itzana.me/strafesnet/utils v0.0.0-20220716194944-d8ca164052f9/go.mod h1:uyYerSieEt4v0MJCdPLppG0LtJ4Yj035vuTetWGsxjY=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
@@ -14,12 +14,18 @@ github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbt
|
||||
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M=
|
||||
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
|
||||
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
|
||||
github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
|
||||
github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c=
|
||||
github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA=
|
||||
github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0=
|
||||
github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0=
|
||||
github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4=
|
||||
github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM=
|
||||
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
|
||||
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
|
||||
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
|
||||
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||
github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
|
||||
github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
|
||||
@@ -39,16 +45,18 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dchest/siphash v1.2.3 h1:QXwFc8cFOR2dSa/gE6o/HokBMWtLUaNDVd+22aKHeEA=
|
||||
github.com/dchest/siphash v1.2.3/go.mod h1:0NvQU092bT0ipiFN++/rXm69QG9tVxLAlQHIXMPAkHc=
|
||||
github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI=
|
||||
github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
|
||||
github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZQ=
|
||||
github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
|
||||
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
|
||||
github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
|
||||
github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE=
|
||||
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
|
||||
github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4=
|
||||
github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI=
|
||||
github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
|
||||
github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
|
||||
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
|
||||
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
|
||||
github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
|
||||
@@ -61,13 +69,13 @@ github.com/gin-gonic/gin v1.10.1 h1:T0ujvqyCSqRopADpgPgiTT63DUQVSfojyME59Ei63pQ=
|
||||
github.com/gin-gonic/gin v1.10.1/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
|
||||
github.com/go-faster/errors v0.7.1 h1:MkJTnDoEdi9pDabt1dpWf7AA8/BaSYZqibYyhZ20AYg=
|
||||
github.com/go-faster/errors v0.7.1/go.mod h1:5ySTjWFiphBs07IKuiL69nxdfd5+fzh1u7FPGZP2quo=
|
||||
github.com/go-faster/jx v1.1.0 h1:ZsW3wD+snOdmTDy9eIVgQdjUpXRRV4rqW8NS3t+20bg=
|
||||
github.com/go-faster/jx v1.1.0/go.mod h1:vKDNikrKoyUmpzaJ0OkIkRQClNHFX/nF3dnTJZb3skg=
|
||||
github.com/go-faster/jx v1.2.0 h1:T2YHJPrFaYu21fJtUxC9GzmluKu8rVIFDwwGBKTDseI=
|
||||
github.com/go-faster/jx v1.2.0/go.mod h1:UWLOVDmMG597a5tBFPLIWJdUxz5/2emOpfsj9Neg0PE=
|
||||
github.com/go-faster/yaml v0.4.6 h1:lOK/EhI04gCpPgPhgt0bChS6bvw7G3WwI8xxVe0sw9I=
|
||||
github.com/go-faster/yaml v0.4.6/go.mod h1:390dRIvV4zbnO7qC9FGo6YYutc+wyyUSHBgbXL52eXk=
|
||||
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
||||
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
|
||||
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
|
||||
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
||||
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
||||
github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
|
||||
@@ -113,8 +121,8 @@ github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
|
||||
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
@@ -138,8 +146,8 @@ github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8Hm
|
||||
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||
github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2eRGjI=
|
||||
github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
|
||||
github.com/klauspost/compress v1.18.1 h1:bcSGx7UbpBqMChDtsF28Lw6v/G94LPrrbMbdC3JH2co=
|
||||
github.com/klauspost/compress v1.18.1/go.mod h1:ZQFFVG+MdnR0P+l6wpXgIL4NTtwiKIdBnrBd8Nrxr+0=
|
||||
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
||||
github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM=
|
||||
github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
|
||||
@@ -157,9 +165,8 @@ github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN
|
||||
github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
|
||||
github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA=
|
||||
github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
|
||||
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
|
||||
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
|
||||
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
@@ -174,20 +181,24 @@ github.com/nats-io/nkeys v0.4.7/go.mod h1:kqXRgRDPlGy7nGaEDMuYzmiJCIAAWDK0IMBtDm
|
||||
github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw=
|
||||
github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||
github.com/ogen-go/ogen v1.2.1 h1:C5A0lvUMu2wl+eWIxnpXMWnuOJ26a2FyzR1CIC2qG0M=
|
||||
github.com/ogen-go/ogen v1.2.1/go.mod h1:P2zQdEu8UqaVRfD5GEFvl+9q63VjMLvDquq1wVbyInM=
|
||||
github.com/ogen-go/ogen v1.18.0 h1:6RQ7lFBjOeNaUWu4getfqIh4GJbEY4hqKuzDtec/g60=
|
||||
github.com/ogen-go/ogen v1.18.0/go.mod h1:dHFr2Wf6cA7tSxMI+zPC21UR5hAlDw8ZYUkK3PziURY=
|
||||
github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
|
||||
github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/redis/go-redis/v9 v9.10.0 h1:FxwK3eV8p/CQa0Ch276C7u2d0eNC9kCmAYQ7mCXCzVs=
|
||||
github.com/redis/go-redis/v9 v9.10.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw=
|
||||
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
|
||||
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
||||
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
|
||||
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys=
|
||||
github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
|
||||
github.com/segmentio/asm v1.2.1 h1:DTNbBqs57ioxAD4PrArqftgypG4/qNpXoJx8TVXxPR0=
|
||||
github.com/segmentio/asm v1.2.1/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
|
||||
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
|
||||
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
|
||||
github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
|
||||
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
||||
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||
@@ -204,8 +215,9 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
|
||||
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||
github.com/swaggo/files v1.0.1 h1:J1bVJ4XHZNq0I46UU90611i9/YzdrF7x92oX1ig5IdE=
|
||||
github.com/swaggo/files v1.0.1/go.mod h1:0qXmMNH6sXNf+73t65aKeB+ApmgxdnkQzVTAj2uaMUg=
|
||||
github.com/swaggo/gin-swagger v1.6.0 h1:y8sxvQ3E20/RCyrXeFfg60r6H0Z+SwpTjMYsMm+zy8M=
|
||||
@@ -221,36 +233,38 @@ github.com/urfave/cli/v2 v2.27.6/go.mod h1:3Sevf16NykTbInEnD0yKkjDAeZDS0A6bzhBH5
|
||||
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 h1:gEOO8jv9F4OT7lGCjxCBTO/36wtF6j2nSip77qHd4x4=
|
||||
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
go.opentelemetry.io/otel v1.32.0 h1:WnBN+Xjcteh0zdk01SVqV55d/m62NJLJdIyb4y/WO5U=
|
||||
go.opentelemetry.io/otel v1.32.0/go.mod h1:00DCVSB0RQcnzlwyTfqtxSm+DRr9hpYrHjNGiBHVQIg=
|
||||
go.opentelemetry.io/otel/metric v1.32.0 h1:xV2umtmNcThh2/a/aCP+h64Xx5wsj8qqnkYZktzNa0M=
|
||||
go.opentelemetry.io/otel/metric v1.32.0/go.mod h1:jH7CIbbK6SH2V2wE16W05BHCtIDzauciCRLoc/SyMv8=
|
||||
go.opentelemetry.io/otel/trace v1.32.0 h1:WIC9mYrXf8TmY/EXuULKc8hR17vE+Hjv2cssQDe03fM=
|
||||
go.opentelemetry.io/otel/trace v1.32.0/go.mod h1:+i4rkvCraA+tG6AzwloGaCtkx53Fa+L+V8e9a7YvhT8=
|
||||
go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64=
|
||||
go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
|
||||
go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48=
|
||||
go.opentelemetry.io/otel v1.39.0/go.mod h1:kLlFTywNWrFyEdH0oj2xK0bFYZtHRYUdv1NklR/tgc8=
|
||||
go.opentelemetry.io/otel/metric v1.39.0 h1:d1UzonvEZriVfpNKEVmHXbdf909uGTOQjA0HF0Ls5Q0=
|
||||
go.opentelemetry.io/otel/metric v1.39.0/go.mod h1:jrZSWL33sD7bBxg1xjrqyDjnuzTUB0x1nBERXd7Ftcs=
|
||||
go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI=
|
||||
go.opentelemetry.io/otel/trace v1.39.0/go.mod h1:88w4/PnZSazkGzz/w84VHpQafiU4EtqqlVdxWy+rNOA=
|
||||
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
|
||||
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
|
||||
go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
|
||||
go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
|
||||
go.uber.org/zap v1.27.1 h1:08RqriUEv8+ArZRYSTXy1LeBScaMpVSTBhCeaZYfMYc=
|
||||
go.uber.org/zap v1.27.1/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
|
||||
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
||||
golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc=
|
||||
golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc=
|
||||
golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
|
||||
golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU=
|
||||
golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20240531132922-fd00a4e0eefc h1:O9NuF4s+E/PvMIy+9IUZB9znFwUIXEWSstNjek6VpVg=
|
||||
golang.org/x/exp v0.0.0-20240531132922-fd00a4e0eefc/go.mod h1:XtvwrStGgqGPLc4cjQfWqZHG1YFdYs6swckp8vpsjnc=
|
||||
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93 h1:fQsdNF2N+/YewlRZiricy4P1iimyPKZ/xwniHj8Q2a0=
|
||||
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93/go.mod h1:EPRbTFwzwjXj9NpYyyrvenVh9Y+GFeEvMNh7Xuz7xgU=
|
||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
|
||||
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA=
|
||||
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI=
|
||||
golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
@@ -264,8 +278,8 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v
|
||||
golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
|
||||
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
|
||||
golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU=
|
||||
golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
@@ -273,8 +287,8 @@ golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJ
|
||||
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw=
|
||||
golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
@@ -288,11 +302,10 @@ golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
|
||||
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk=
|
||||
golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
@@ -301,8 +314,8 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
|
||||
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
|
||||
golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU=
|
||||
golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
|
||||
@@ -310,8 +323,8 @@ golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3
|
||||
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||
golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
|
||||
golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
|
||||
379
openapi.yaml
379
openapi.yaml
@@ -14,15 +14,41 @@ tags:
|
||||
description: Long-running operations
|
||||
- name: Session
|
||||
description: Session queries
|
||||
- name: Stats
|
||||
description: Statistics queries
|
||||
- name: Submissions
|
||||
description: Submission operations
|
||||
- name: Scripts
|
||||
description: Script operations
|
||||
- name: ScriptPolicy
|
||||
description: Script policy operations
|
||||
- name: Thumbnails
|
||||
description: Thumbnail operations
|
||||
- name: Users
|
||||
description: User operations
|
||||
security:
|
||||
- cookieAuth: []
|
||||
paths:
|
||||
/stats:
|
||||
get:
|
||||
summary: Get aggregate statistics
|
||||
operationId: getStats
|
||||
tags:
|
||||
- Stats
|
||||
security: []
|
||||
responses:
|
||||
"200":
|
||||
description: Successful response
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Stats"
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/session/user:
|
||||
get:
|
||||
summary: Get information about the currently logged in user
|
||||
@@ -244,6 +270,12 @@ paths:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
- name: AssetVersion
|
||||
in: query
|
||||
schema:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
- name: TargetAssetID
|
||||
in: query
|
||||
schema:
|
||||
@@ -415,6 +447,30 @@ paths:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/mapfixes/{MapfixID}/description:
|
||||
patch:
|
||||
summary: Update description (submitter only)
|
||||
operationId: updateMapfixDescription
|
||||
tags:
|
||||
- Mapfixes
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/MapfixID'
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
text/plain:
|
||||
schema:
|
||||
type: string
|
||||
maxLength: 256
|
||||
responses:
|
||||
"204":
|
||||
description: Successful response
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/mapfixes/{MapfixID}/completed:
|
||||
post:
|
||||
summary: Called by maptest when a player completes the map
|
||||
@@ -587,7 +643,7 @@ paths:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/mapfixes/{MapfixID}/status/trigger-upload:
|
||||
post:
|
||||
summary: Role Admin changes status from Validated -> Uploading
|
||||
summary: Role MapfixUpload changes status from Validated -> Uploading
|
||||
operationId: actionMapfixTriggerUpload
|
||||
tags:
|
||||
- Mapfixes
|
||||
@@ -604,7 +660,7 @@ paths:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/mapfixes/{MapfixID}/status/reset-uploading:
|
||||
post:
|
||||
summary: Role Admin manually resets uploading softlock and changes status from Uploading -> Validated
|
||||
summary: Role MapfixUpload manually resets uploading softlock and changes status from Uploading -> Validated
|
||||
operationId: actionMapfixValidated
|
||||
tags:
|
||||
- Mapfixes
|
||||
@@ -619,6 +675,40 @@ paths:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/mapfixes/{MapfixID}/status/trigger-release:
|
||||
post:
|
||||
summary: Role MapfixUpload changes status from Uploaded -> Releasing
|
||||
operationId: actionMapfixTriggerRelease
|
||||
tags:
|
||||
- Mapfixes
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/MapfixID'
|
||||
responses:
|
||||
"204":
|
||||
description: Successful response
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/mapfixes/{MapfixID}/status/reset-releasing:
|
||||
post:
|
||||
summary: Role MapfixUpload manually resets releasing softlock and changes status from Releasing -> Uploaded
|
||||
operationId: actionMapfixUploaded
|
||||
tags:
|
||||
- Mapfixes
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/MapfixID'
|
||||
responses:
|
||||
"204":
|
||||
description: Successful response
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/operations/{OperationID}:
|
||||
get:
|
||||
summary: Retrieve operation with ID
|
||||
@@ -698,6 +788,12 @@ paths:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
- name: AssetVersion
|
||||
in: query
|
||||
schema:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
- name: UploadedAssetID
|
||||
in: query
|
||||
schema:
|
||||
@@ -1067,7 +1163,7 @@ paths:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/submissions/{SubmissionID}/status/trigger-upload:
|
||||
post:
|
||||
summary: Role Admin changes status from Validated -> Uploading
|
||||
summary: Role SubmissionUpload changes status from Validated -> Uploading
|
||||
operationId: actionSubmissionTriggerUpload
|
||||
tags:
|
||||
- Submissions
|
||||
@@ -1084,7 +1180,7 @@ paths:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/submissions/{SubmissionID}/status/reset-uploading:
|
||||
post:
|
||||
summary: Role Admin manually resets uploading softlock and changes status from Uploading -> Validated
|
||||
summary: Role SubmissionUpload manually resets uploading softlock and changes status from Uploading -> Validated
|
||||
operationId: actionSubmissionValidated
|
||||
tags:
|
||||
- Submissions
|
||||
@@ -1101,7 +1197,7 @@ paths:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/release-submissions:
|
||||
post:
|
||||
summary: Release a set of uploaded maps
|
||||
summary: Release a set of uploaded maps. Role SubmissionRelease
|
||||
operationId: releaseSubmissions
|
||||
tags:
|
||||
- Submissions
|
||||
@@ -1118,6 +1214,10 @@ paths:
|
||||
responses:
|
||||
"201":
|
||||
description: Successful response
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/OperationID"
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
@@ -1388,6 +1488,222 @@ paths:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/thumbnails/assets:
|
||||
post:
|
||||
summary: Batch fetch asset thumbnails
|
||||
operationId: batchAssetThumbnails
|
||||
tags:
|
||||
- Thumbnails
|
||||
security: []
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
required:
|
||||
- assetIds
|
||||
properties:
|
||||
assetIds:
|
||||
type: array
|
||||
items:
|
||||
type: integer
|
||||
format: uint64
|
||||
maxItems: 100
|
||||
description: Array of asset IDs (max 100)
|
||||
size:
|
||||
type: string
|
||||
enum:
|
||||
- "150x150"
|
||||
- "420x420"
|
||||
- "768x432"
|
||||
default: "420x420"
|
||||
description: Thumbnail size
|
||||
responses:
|
||||
"200":
|
||||
description: Successful response
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
thumbnails:
|
||||
type: object
|
||||
additionalProperties:
|
||||
type: string
|
||||
description: Map of asset ID to thumbnail URL
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/thumbnails/asset/{AssetID}:
|
||||
get:
|
||||
summary: Get single asset thumbnail
|
||||
operationId: getAssetThumbnail
|
||||
tags:
|
||||
- Thumbnails
|
||||
security: []
|
||||
parameters:
|
||||
- name: AssetID
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: uint64
|
||||
- name: size
|
||||
in: query
|
||||
schema:
|
||||
type: string
|
||||
enum:
|
||||
- "150x150"
|
||||
- "420x420"
|
||||
- "768x432"
|
||||
default: "420x420"
|
||||
responses:
|
||||
"302":
|
||||
description: Redirect to thumbnail URL
|
||||
headers:
|
||||
Location:
|
||||
description: URL to redirect to
|
||||
schema:
|
||||
type: string
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/thumbnails/users:
|
||||
post:
|
||||
summary: Batch fetch user avatar thumbnails
|
||||
operationId: batchUserThumbnails
|
||||
tags:
|
||||
- Thumbnails
|
||||
security: []
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
required:
|
||||
- userIds
|
||||
properties:
|
||||
userIds:
|
||||
type: array
|
||||
items:
|
||||
type: integer
|
||||
format: uint64
|
||||
maxItems: 100
|
||||
description: Array of user IDs (max 100)
|
||||
size:
|
||||
type: string
|
||||
enum:
|
||||
- "150x150"
|
||||
- "420x420"
|
||||
- "768x432"
|
||||
default: "150x150"
|
||||
description: Thumbnail size
|
||||
responses:
|
||||
"200":
|
||||
description: Successful response
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
thumbnails:
|
||||
type: object
|
||||
additionalProperties:
|
||||
type: string
|
||||
description: Map of user ID to thumbnail URL
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/thumbnails/user/{UserID}:
|
||||
get:
|
||||
summary: Get single user avatar thumbnail
|
||||
operationId: getUserThumbnail
|
||||
tags:
|
||||
- Thumbnails
|
||||
security: []
|
||||
parameters:
|
||||
- name: UserID
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: uint64
|
||||
- name: size
|
||||
in: query
|
||||
schema:
|
||||
type: string
|
||||
enum:
|
||||
- "150x150"
|
||||
- "420x420"
|
||||
- "768x432"
|
||||
default: "150x150"
|
||||
responses:
|
||||
"302":
|
||||
description: Redirect to thumbnail URL
|
||||
headers:
|
||||
Location:
|
||||
description: URL to redirect to
|
||||
schema:
|
||||
type: string
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/usernames:
|
||||
post:
|
||||
summary: Batch fetch usernames
|
||||
operationId: batchUsernames
|
||||
tags:
|
||||
- Users
|
||||
security: []
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
required:
|
||||
- userIds
|
||||
properties:
|
||||
userIds:
|
||||
type: array
|
||||
items:
|
||||
type: integer
|
||||
format: uint64
|
||||
maxItems: 100
|
||||
description: Array of user IDs (max 100)
|
||||
responses:
|
||||
"200":
|
||||
description: Successful response
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
usernames:
|
||||
type: object
|
||||
additionalProperties:
|
||||
type: string
|
||||
description: Map of user ID to username
|
||||
default:
|
||||
description: General Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
components:
|
||||
securitySchemes:
|
||||
cookieAuth:
|
||||
@@ -1624,6 +1940,8 @@ components:
|
||||
- Submitter
|
||||
- AssetID
|
||||
- AssetVersion
|
||||
# - ValidatedAssetID
|
||||
# - ValidatedAssetVersion
|
||||
- Completed
|
||||
- TargetAssetID
|
||||
- StatusID
|
||||
@@ -1664,6 +1982,14 @@ components:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
ValidatedAssetID:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
ValidatedAssetVersion:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
Completed:
|
||||
type: boolean
|
||||
TargetAssetID:
|
||||
@@ -1902,7 +2228,7 @@ components:
|
||||
properties:
|
||||
Name:
|
||||
type: string
|
||||
maxLength: 128
|
||||
maxLength: 256
|
||||
Source:
|
||||
type: string
|
||||
maxLength: 1048576
|
||||
@@ -2001,6 +2327,47 @@ components:
|
||||
type: integer
|
||||
format: int32
|
||||
minimum: 0
|
||||
Stats:
|
||||
description: Aggregate statistics for submissions and mapfixes
|
||||
type: object
|
||||
properties:
|
||||
TotalSubmissions:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
description: Total number of submissions
|
||||
TotalMapfixes:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
description: Total number of mapfixes
|
||||
ReleasedSubmissions:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
description: Number of released submissions
|
||||
ReleasedMapfixes:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
description: Number of released mapfixes
|
||||
SubmittedSubmissions:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
description: Number of submissions under review
|
||||
SubmittedMapfixes:
|
||||
type: integer
|
||||
format: int64
|
||||
minimum: 0
|
||||
description: Number of mapfixes under review
|
||||
required:
|
||||
- TotalSubmissions
|
||||
- TotalMapfixes
|
||||
- ReleasedSubmissions
|
||||
- ReleasedMapfixes
|
||||
- SubmittedSubmissions
|
||||
- SubmittedMapfixes
|
||||
Error:
|
||||
description: Represents error object
|
||||
type: object
|
||||
|
||||
@@ -5,14 +5,14 @@ package api
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"go.opentelemetry.io/otel"
|
||||
"go.opentelemetry.io/otel/metric"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
|
||||
ht "github.com/ogen-go/ogen/http"
|
||||
"github.com/ogen-go/ogen/middleware"
|
||||
"github.com/ogen-go/ogen/ogenerrors"
|
||||
"github.com/ogen-go/ogen/otelogen"
|
||||
"go.opentelemetry.io/otel"
|
||||
"go.opentelemetry.io/otel/attribute"
|
||||
"go.opentelemetry.io/otel/metric"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -32,6 +32,7 @@ type otelConfig struct {
|
||||
Tracer trace.Tracer
|
||||
MeterProvider metric.MeterProvider
|
||||
Meter metric.Meter
|
||||
Attributes []attribute.KeyValue
|
||||
}
|
||||
|
||||
func (cfg *otelConfig) initOTEL() {
|
||||
@@ -215,6 +216,13 @@ func WithMeterProvider(provider metric.MeterProvider) Option {
|
||||
})
|
||||
}
|
||||
|
||||
// WithAttributes specifies default otel attributes.
|
||||
func WithAttributes(attributes ...attribute.KeyValue) Option {
|
||||
return otelOptionFunc(func(cfg *otelConfig) {
|
||||
cfg.Attributes = attributes
|
||||
})
|
||||
}
|
||||
|
||||
// WithClient specifies http client to use.
|
||||
func WithClient(client ht.Client) ClientOption {
|
||||
return optionFunc[clientConfig](func(cfg *clientConfig) {
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
19
pkg/api/oas_defaults_gen.go
Normal file
19
pkg/api/oas_defaults_gen.go
Normal file
@@ -0,0 +1,19 @@
|
||||
// Code generated by ogen, DO NOT EDIT.
|
||||
|
||||
package api
|
||||
|
||||
// setDefaults set default value of fields.
|
||||
func (s *BatchAssetThumbnailsReq) setDefaults() {
|
||||
{
|
||||
val := BatchAssetThumbnailsReqSize("420x420")
|
||||
s.Size.SetTo(val)
|
||||
}
|
||||
}
|
||||
|
||||
// setDefaults set default value of fields.
|
||||
func (s *BatchUserThumbnailsReq) setDefaults() {
|
||||
{
|
||||
val := BatchUserThumbnailsReqSize("150x150")
|
||||
s.Size.SetTo(val)
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -12,10 +12,12 @@ const (
|
||||
ActionMapfixResetSubmittingOperation OperationName = "ActionMapfixResetSubmitting"
|
||||
ActionMapfixRetryValidateOperation OperationName = "ActionMapfixRetryValidate"
|
||||
ActionMapfixRevokeOperation OperationName = "ActionMapfixRevoke"
|
||||
ActionMapfixTriggerReleaseOperation OperationName = "ActionMapfixTriggerRelease"
|
||||
ActionMapfixTriggerSubmitOperation OperationName = "ActionMapfixTriggerSubmit"
|
||||
ActionMapfixTriggerSubmitUncheckedOperation OperationName = "ActionMapfixTriggerSubmitUnchecked"
|
||||
ActionMapfixTriggerUploadOperation OperationName = "ActionMapfixTriggerUpload"
|
||||
ActionMapfixTriggerValidateOperation OperationName = "ActionMapfixTriggerValidate"
|
||||
ActionMapfixUploadedOperation OperationName = "ActionMapfixUploaded"
|
||||
ActionMapfixValidatedOperation OperationName = "ActionMapfixValidated"
|
||||
ActionSubmissionAcceptedOperation OperationName = "ActionSubmissionAccepted"
|
||||
ActionSubmissionRejectOperation OperationName = "ActionSubmissionReject"
|
||||
@@ -28,6 +30,9 @@ const (
|
||||
ActionSubmissionTriggerUploadOperation OperationName = "ActionSubmissionTriggerUpload"
|
||||
ActionSubmissionTriggerValidateOperation OperationName = "ActionSubmissionTriggerValidate"
|
||||
ActionSubmissionValidatedOperation OperationName = "ActionSubmissionValidated"
|
||||
BatchAssetThumbnailsOperation OperationName = "BatchAssetThumbnails"
|
||||
BatchUserThumbnailsOperation OperationName = "BatchUserThumbnails"
|
||||
BatchUsernamesOperation OperationName = "BatchUsernames"
|
||||
CreateMapfixOperation OperationName = "CreateMapfix"
|
||||
CreateMapfixAuditCommentOperation OperationName = "CreateMapfixAuditComment"
|
||||
CreateScriptOperation OperationName = "CreateScript"
|
||||
@@ -38,12 +43,15 @@ const (
|
||||
DeleteScriptOperation OperationName = "DeleteScript"
|
||||
DeleteScriptPolicyOperation OperationName = "DeleteScriptPolicy"
|
||||
DownloadMapAssetOperation OperationName = "DownloadMapAsset"
|
||||
GetAssetThumbnailOperation OperationName = "GetAssetThumbnail"
|
||||
GetMapOperation OperationName = "GetMap"
|
||||
GetMapfixOperation OperationName = "GetMapfix"
|
||||
GetOperationOperation OperationName = "GetOperation"
|
||||
GetScriptOperation OperationName = "GetScript"
|
||||
GetScriptPolicyOperation OperationName = "GetScriptPolicy"
|
||||
GetStatsOperation OperationName = "GetStats"
|
||||
GetSubmissionOperation OperationName = "GetSubmission"
|
||||
GetUserThumbnailOperation OperationName = "GetUserThumbnail"
|
||||
ListMapfixAuditEventsOperation OperationName = "ListMapfixAuditEvents"
|
||||
ListMapfixesOperation OperationName = "ListMapfixes"
|
||||
ListMapsOperation OperationName = "ListMaps"
|
||||
@@ -57,6 +65,7 @@ const (
|
||||
SessionValidateOperation OperationName = "SessionValidate"
|
||||
SetMapfixCompletedOperation OperationName = "SetMapfixCompleted"
|
||||
SetSubmissionCompletedOperation OperationName = "SetSubmissionCompleted"
|
||||
UpdateMapfixDescriptionOperation OperationName = "UpdateMapfixDescription"
|
||||
UpdateMapfixModelOperation OperationName = "UpdateMapfixModel"
|
||||
UpdateScriptOperation OperationName = "UpdateScript"
|
||||
UpdateScriptPolicyOperation OperationName = "UpdateScriptPolicy"
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -3,6 +3,7 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"mime"
|
||||
@@ -10,13 +11,13 @@ import (
|
||||
|
||||
"github.com/go-faster/errors"
|
||||
"github.com/go-faster/jx"
|
||||
|
||||
"github.com/ogen-go/ogen/ogenerrors"
|
||||
"github.com/ogen-go/ogen/validate"
|
||||
)
|
||||
|
||||
func (s *Server) decodeCreateMapfixRequest(r *http.Request) (
|
||||
req *MapfixTriggerCreate,
|
||||
func (s *Server) decodeBatchAssetThumbnailsRequest(r *http.Request) (
|
||||
req *BatchAssetThumbnailsReq,
|
||||
rawBody []byte,
|
||||
close func() error,
|
||||
rerr error,
|
||||
) {
|
||||
@@ -37,22 +38,266 @@ func (s *Server) decodeCreateMapfixRequest(r *http.Request) (
|
||||
}()
|
||||
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return req, close, errors.Wrap(err, "parse media type")
|
||||
return req, rawBody, close, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
if r.ContentLength == 0 {
|
||||
return req, close, validate.ErrBodyRequired
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
buf, err := io.ReadAll(r.Body)
|
||||
defer func() {
|
||||
_ = r.Body.Close()
|
||||
}()
|
||||
if err != nil {
|
||||
return req, close, err
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
|
||||
// Reset the body to allow for downstream reading.
|
||||
r.Body = io.NopCloser(bytes.NewBuffer(buf))
|
||||
|
||||
if len(buf) == 0 {
|
||||
return req, close, validate.ErrBodyRequired
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
|
||||
rawBody = append(rawBody, buf...)
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var request BatchAssetThumbnailsReq
|
||||
if err := func() error {
|
||||
if err := request.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := request.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return req, rawBody, close, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &request, rawBody, close, nil
|
||||
default:
|
||||
return req, rawBody, close, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) decodeBatchUserThumbnailsRequest(r *http.Request) (
|
||||
req *BatchUserThumbnailsReq,
|
||||
rawBody []byte,
|
||||
close func() error,
|
||||
rerr error,
|
||||
) {
|
||||
var closers []func() error
|
||||
close = func() error {
|
||||
var merr error
|
||||
// Close in reverse order, to match defer behavior.
|
||||
for i := len(closers) - 1; i >= 0; i-- {
|
||||
c := closers[i]
|
||||
merr = errors.Join(merr, c())
|
||||
}
|
||||
return merr
|
||||
}
|
||||
defer func() {
|
||||
if rerr != nil {
|
||||
rerr = errors.Join(rerr, close())
|
||||
}
|
||||
}()
|
||||
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return req, rawBody, close, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
if r.ContentLength == 0 {
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
buf, err := io.ReadAll(r.Body)
|
||||
defer func() {
|
||||
_ = r.Body.Close()
|
||||
}()
|
||||
if err != nil {
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
|
||||
// Reset the body to allow for downstream reading.
|
||||
r.Body = io.NopCloser(bytes.NewBuffer(buf))
|
||||
|
||||
if len(buf) == 0 {
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
|
||||
rawBody = append(rawBody, buf...)
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var request BatchUserThumbnailsReq
|
||||
if err := func() error {
|
||||
if err := request.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := request.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return req, rawBody, close, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &request, rawBody, close, nil
|
||||
default:
|
||||
return req, rawBody, close, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) decodeBatchUsernamesRequest(r *http.Request) (
|
||||
req *BatchUsernamesReq,
|
||||
rawBody []byte,
|
||||
close func() error,
|
||||
rerr error,
|
||||
) {
|
||||
var closers []func() error
|
||||
close = func() error {
|
||||
var merr error
|
||||
// Close in reverse order, to match defer behavior.
|
||||
for i := len(closers) - 1; i >= 0; i-- {
|
||||
c := closers[i]
|
||||
merr = errors.Join(merr, c())
|
||||
}
|
||||
return merr
|
||||
}
|
||||
defer func() {
|
||||
if rerr != nil {
|
||||
rerr = errors.Join(rerr, close())
|
||||
}
|
||||
}()
|
||||
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return req, rawBody, close, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
if r.ContentLength == 0 {
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
buf, err := io.ReadAll(r.Body)
|
||||
defer func() {
|
||||
_ = r.Body.Close()
|
||||
}()
|
||||
if err != nil {
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
|
||||
// Reset the body to allow for downstream reading.
|
||||
r.Body = io.NopCloser(bytes.NewBuffer(buf))
|
||||
|
||||
if len(buf) == 0 {
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
|
||||
rawBody = append(rawBody, buf...)
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var request BatchUsernamesReq
|
||||
if err := func() error {
|
||||
if err := request.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := request.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return req, rawBody, close, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &request, rawBody, close, nil
|
||||
default:
|
||||
return req, rawBody, close, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) decodeCreateMapfixRequest(r *http.Request) (
|
||||
req *MapfixTriggerCreate,
|
||||
rawBody []byte,
|
||||
close func() error,
|
||||
rerr error,
|
||||
) {
|
||||
var closers []func() error
|
||||
close = func() error {
|
||||
var merr error
|
||||
// Close in reverse order, to match defer behavior.
|
||||
for i := len(closers) - 1; i >= 0; i-- {
|
||||
c := closers[i]
|
||||
merr = errors.Join(merr, c())
|
||||
}
|
||||
return merr
|
||||
}
|
||||
defer func() {
|
||||
if rerr != nil {
|
||||
rerr = errors.Join(rerr, close())
|
||||
}
|
||||
}()
|
||||
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return req, rawBody, close, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
if r.ContentLength == 0 {
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
buf, err := io.ReadAll(r.Body)
|
||||
defer func() {
|
||||
_ = r.Body.Close()
|
||||
}()
|
||||
if err != nil {
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
|
||||
// Reset the body to allow for downstream reading.
|
||||
r.Body = io.NopCloser(bytes.NewBuffer(buf))
|
||||
|
||||
if len(buf) == 0 {
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
|
||||
rawBody = append(rawBody, buf...)
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var request MapfixTriggerCreate
|
||||
@@ -70,7 +315,7 @@ func (s *Server) decodeCreateMapfixRequest(r *http.Request) (
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return req, close, err
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := request.Validate(); err != nil {
|
||||
@@ -78,16 +323,17 @@ func (s *Server) decodeCreateMapfixRequest(r *http.Request) (
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return req, close, errors.Wrap(err, "validate")
|
||||
return req, rawBody, close, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &request, close, nil
|
||||
return &request, rawBody, close, nil
|
||||
default:
|
||||
return req, close, validate.InvalidContentType(ct)
|
||||
return req, rawBody, close, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) decodeCreateMapfixAuditCommentRequest(r *http.Request) (
|
||||
req CreateMapfixAuditCommentReq,
|
||||
rawBody []byte,
|
||||
close func() error,
|
||||
rerr error,
|
||||
) {
|
||||
@@ -108,20 +354,21 @@ func (s *Server) decodeCreateMapfixAuditCommentRequest(r *http.Request) (
|
||||
}()
|
||||
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return req, close, errors.Wrap(err, "parse media type")
|
||||
return req, rawBody, close, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "text/plain":
|
||||
reader := r.Body
|
||||
request := CreateMapfixAuditCommentReq{Data: reader}
|
||||
return request, close, nil
|
||||
return request, rawBody, close, nil
|
||||
default:
|
||||
return req, close, validate.InvalidContentType(ct)
|
||||
return req, rawBody, close, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) decodeCreateScriptRequest(r *http.Request) (
|
||||
req *ScriptCreate,
|
||||
rawBody []byte,
|
||||
close func() error,
|
||||
rerr error,
|
||||
) {
|
||||
@@ -142,22 +389,29 @@ func (s *Server) decodeCreateScriptRequest(r *http.Request) (
|
||||
}()
|
||||
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return req, close, errors.Wrap(err, "parse media type")
|
||||
return req, rawBody, close, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
if r.ContentLength == 0 {
|
||||
return req, close, validate.ErrBodyRequired
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
buf, err := io.ReadAll(r.Body)
|
||||
defer func() {
|
||||
_ = r.Body.Close()
|
||||
}()
|
||||
if err != nil {
|
||||
return req, close, err
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
|
||||
// Reset the body to allow for downstream reading.
|
||||
r.Body = io.NopCloser(bytes.NewBuffer(buf))
|
||||
|
||||
if len(buf) == 0 {
|
||||
return req, close, validate.ErrBodyRequired
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
|
||||
rawBody = append(rawBody, buf...)
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var request ScriptCreate
|
||||
@@ -175,7 +429,7 @@ func (s *Server) decodeCreateScriptRequest(r *http.Request) (
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return req, close, err
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := request.Validate(); err != nil {
|
||||
@@ -183,16 +437,17 @@ func (s *Server) decodeCreateScriptRequest(r *http.Request) (
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return req, close, errors.Wrap(err, "validate")
|
||||
return req, rawBody, close, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &request, close, nil
|
||||
return &request, rawBody, close, nil
|
||||
default:
|
||||
return req, close, validate.InvalidContentType(ct)
|
||||
return req, rawBody, close, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) decodeCreateScriptPolicyRequest(r *http.Request) (
|
||||
req *ScriptPolicyCreate,
|
||||
rawBody []byte,
|
||||
close func() error,
|
||||
rerr error,
|
||||
) {
|
||||
@@ -213,22 +468,29 @@ func (s *Server) decodeCreateScriptPolicyRequest(r *http.Request) (
|
||||
}()
|
||||
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return req, close, errors.Wrap(err, "parse media type")
|
||||
return req, rawBody, close, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
if r.ContentLength == 0 {
|
||||
return req, close, validate.ErrBodyRequired
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
buf, err := io.ReadAll(r.Body)
|
||||
defer func() {
|
||||
_ = r.Body.Close()
|
||||
}()
|
||||
if err != nil {
|
||||
return req, close, err
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
|
||||
// Reset the body to allow for downstream reading.
|
||||
r.Body = io.NopCloser(bytes.NewBuffer(buf))
|
||||
|
||||
if len(buf) == 0 {
|
||||
return req, close, validate.ErrBodyRequired
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
|
||||
rawBody = append(rawBody, buf...)
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var request ScriptPolicyCreate
|
||||
@@ -246,7 +508,7 @@ func (s *Server) decodeCreateScriptPolicyRequest(r *http.Request) (
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return req, close, err
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := request.Validate(); err != nil {
|
||||
@@ -254,16 +516,17 @@ func (s *Server) decodeCreateScriptPolicyRequest(r *http.Request) (
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return req, close, errors.Wrap(err, "validate")
|
||||
return req, rawBody, close, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &request, close, nil
|
||||
return &request, rawBody, close, nil
|
||||
default:
|
||||
return req, close, validate.InvalidContentType(ct)
|
||||
return req, rawBody, close, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) decodeCreateSubmissionRequest(r *http.Request) (
|
||||
req *SubmissionTriggerCreate,
|
||||
rawBody []byte,
|
||||
close func() error,
|
||||
rerr error,
|
||||
) {
|
||||
@@ -284,22 +547,29 @@ func (s *Server) decodeCreateSubmissionRequest(r *http.Request) (
|
||||
}()
|
||||
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return req, close, errors.Wrap(err, "parse media type")
|
||||
return req, rawBody, close, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
if r.ContentLength == 0 {
|
||||
return req, close, validate.ErrBodyRequired
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
buf, err := io.ReadAll(r.Body)
|
||||
defer func() {
|
||||
_ = r.Body.Close()
|
||||
}()
|
||||
if err != nil {
|
||||
return req, close, err
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
|
||||
// Reset the body to allow for downstream reading.
|
||||
r.Body = io.NopCloser(bytes.NewBuffer(buf))
|
||||
|
||||
if len(buf) == 0 {
|
||||
return req, close, validate.ErrBodyRequired
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
|
||||
rawBody = append(rawBody, buf...)
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var request SubmissionTriggerCreate
|
||||
@@ -317,7 +587,7 @@ func (s *Server) decodeCreateSubmissionRequest(r *http.Request) (
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return req, close, err
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := request.Validate(); err != nil {
|
||||
@@ -325,16 +595,17 @@ func (s *Server) decodeCreateSubmissionRequest(r *http.Request) (
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return req, close, errors.Wrap(err, "validate")
|
||||
return req, rawBody, close, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &request, close, nil
|
||||
return &request, rawBody, close, nil
|
||||
default:
|
||||
return req, close, validate.InvalidContentType(ct)
|
||||
return req, rawBody, close, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) decodeCreateSubmissionAdminRequest(r *http.Request) (
|
||||
req *SubmissionTriggerCreate,
|
||||
rawBody []byte,
|
||||
close func() error,
|
||||
rerr error,
|
||||
) {
|
||||
@@ -355,22 +626,29 @@ func (s *Server) decodeCreateSubmissionAdminRequest(r *http.Request) (
|
||||
}()
|
||||
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return req, close, errors.Wrap(err, "parse media type")
|
||||
return req, rawBody, close, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
if r.ContentLength == 0 {
|
||||
return req, close, validate.ErrBodyRequired
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
buf, err := io.ReadAll(r.Body)
|
||||
defer func() {
|
||||
_ = r.Body.Close()
|
||||
}()
|
||||
if err != nil {
|
||||
return req, close, err
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
|
||||
// Reset the body to allow for downstream reading.
|
||||
r.Body = io.NopCloser(bytes.NewBuffer(buf))
|
||||
|
||||
if len(buf) == 0 {
|
||||
return req, close, validate.ErrBodyRequired
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
|
||||
rawBody = append(rawBody, buf...)
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var request SubmissionTriggerCreate
|
||||
@@ -388,7 +666,7 @@ func (s *Server) decodeCreateSubmissionAdminRequest(r *http.Request) (
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return req, close, err
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := request.Validate(); err != nil {
|
||||
@@ -396,16 +674,17 @@ func (s *Server) decodeCreateSubmissionAdminRequest(r *http.Request) (
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return req, close, errors.Wrap(err, "validate")
|
||||
return req, rawBody, close, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &request, close, nil
|
||||
return &request, rawBody, close, nil
|
||||
default:
|
||||
return req, close, validate.InvalidContentType(ct)
|
||||
return req, rawBody, close, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) decodeCreateSubmissionAuditCommentRequest(r *http.Request) (
|
||||
req CreateSubmissionAuditCommentReq,
|
||||
rawBody []byte,
|
||||
close func() error,
|
||||
rerr error,
|
||||
) {
|
||||
@@ -426,20 +705,21 @@ func (s *Server) decodeCreateSubmissionAuditCommentRequest(r *http.Request) (
|
||||
}()
|
||||
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return req, close, errors.Wrap(err, "parse media type")
|
||||
return req, rawBody, close, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "text/plain":
|
||||
reader := r.Body
|
||||
request := CreateSubmissionAuditCommentReq{Data: reader}
|
||||
return request, close, nil
|
||||
return request, rawBody, close, nil
|
||||
default:
|
||||
return req, close, validate.InvalidContentType(ct)
|
||||
return req, rawBody, close, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) decodeReleaseSubmissionsRequest(r *http.Request) (
|
||||
req []ReleaseInfo,
|
||||
rawBody []byte,
|
||||
close func() error,
|
||||
rerr error,
|
||||
) {
|
||||
@@ -460,22 +740,29 @@ func (s *Server) decodeReleaseSubmissionsRequest(r *http.Request) (
|
||||
}()
|
||||
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return req, close, errors.Wrap(err, "parse media type")
|
||||
return req, rawBody, close, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
if r.ContentLength == 0 {
|
||||
return req, close, validate.ErrBodyRequired
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
buf, err := io.ReadAll(r.Body)
|
||||
defer func() {
|
||||
_ = r.Body.Close()
|
||||
}()
|
||||
if err != nil {
|
||||
return req, close, err
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
|
||||
// Reset the body to allow for downstream reading.
|
||||
r.Body = io.NopCloser(bytes.NewBuffer(buf))
|
||||
|
||||
if len(buf) == 0 {
|
||||
return req, close, validate.ErrBodyRequired
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
|
||||
rawBody = append(rawBody, buf...)
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var request []ReleaseInfo
|
||||
@@ -501,7 +788,7 @@ func (s *Server) decodeReleaseSubmissionsRequest(r *http.Request) (
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return req, close, err
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
if err := func() error {
|
||||
if request == nil {
|
||||
@@ -534,16 +821,17 @@ func (s *Server) decodeReleaseSubmissionsRequest(r *http.Request) (
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return req, close, errors.Wrap(err, "validate")
|
||||
return req, rawBody, close, errors.Wrap(err, "validate")
|
||||
}
|
||||
return request, close, nil
|
||||
return request, rawBody, close, nil
|
||||
default:
|
||||
return req, close, validate.InvalidContentType(ct)
|
||||
return req, rawBody, close, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) decodeUpdateScriptRequest(r *http.Request) (
|
||||
req *ScriptUpdate,
|
||||
func (s *Server) decodeUpdateMapfixDescriptionRequest(r *http.Request) (
|
||||
req UpdateMapfixDescriptionReq,
|
||||
rawBody []byte,
|
||||
close func() error,
|
||||
rerr error,
|
||||
) {
|
||||
@@ -564,22 +852,64 @@ func (s *Server) decodeUpdateScriptRequest(r *http.Request) (
|
||||
}()
|
||||
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return req, close, errors.Wrap(err, "parse media type")
|
||||
return req, rawBody, close, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "text/plain":
|
||||
reader := r.Body
|
||||
request := UpdateMapfixDescriptionReq{Data: reader}
|
||||
return request, rawBody, close, nil
|
||||
default:
|
||||
return req, rawBody, close, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) decodeUpdateScriptRequest(r *http.Request) (
|
||||
req *ScriptUpdate,
|
||||
rawBody []byte,
|
||||
close func() error,
|
||||
rerr error,
|
||||
) {
|
||||
var closers []func() error
|
||||
close = func() error {
|
||||
var merr error
|
||||
// Close in reverse order, to match defer behavior.
|
||||
for i := len(closers) - 1; i >= 0; i-- {
|
||||
c := closers[i]
|
||||
merr = errors.Join(merr, c())
|
||||
}
|
||||
return merr
|
||||
}
|
||||
defer func() {
|
||||
if rerr != nil {
|
||||
rerr = errors.Join(rerr, close())
|
||||
}
|
||||
}()
|
||||
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return req, rawBody, close, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
if r.ContentLength == 0 {
|
||||
return req, close, validate.ErrBodyRequired
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
buf, err := io.ReadAll(r.Body)
|
||||
defer func() {
|
||||
_ = r.Body.Close()
|
||||
}()
|
||||
if err != nil {
|
||||
return req, close, err
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
|
||||
// Reset the body to allow for downstream reading.
|
||||
r.Body = io.NopCloser(bytes.NewBuffer(buf))
|
||||
|
||||
if len(buf) == 0 {
|
||||
return req, close, validate.ErrBodyRequired
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
|
||||
rawBody = append(rawBody, buf...)
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var request ScriptUpdate
|
||||
@@ -597,7 +927,7 @@ func (s *Server) decodeUpdateScriptRequest(r *http.Request) (
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return req, close, err
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := request.Validate(); err != nil {
|
||||
@@ -605,16 +935,17 @@ func (s *Server) decodeUpdateScriptRequest(r *http.Request) (
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return req, close, errors.Wrap(err, "validate")
|
||||
return req, rawBody, close, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &request, close, nil
|
||||
return &request, rawBody, close, nil
|
||||
default:
|
||||
return req, close, validate.InvalidContentType(ct)
|
||||
return req, rawBody, close, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) decodeUpdateScriptPolicyRequest(r *http.Request) (
|
||||
req *ScriptPolicyUpdate,
|
||||
rawBody []byte,
|
||||
close func() error,
|
||||
rerr error,
|
||||
) {
|
||||
@@ -635,22 +966,29 @@ func (s *Server) decodeUpdateScriptPolicyRequest(r *http.Request) (
|
||||
}()
|
||||
ct, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return req, close, errors.Wrap(err, "parse media type")
|
||||
return req, rawBody, close, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
if r.ContentLength == 0 {
|
||||
return req, close, validate.ErrBodyRequired
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
buf, err := io.ReadAll(r.Body)
|
||||
defer func() {
|
||||
_ = r.Body.Close()
|
||||
}()
|
||||
if err != nil {
|
||||
return req, close, err
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
|
||||
// Reset the body to allow for downstream reading.
|
||||
r.Body = io.NopCloser(bytes.NewBuffer(buf))
|
||||
|
||||
if len(buf) == 0 {
|
||||
return req, close, validate.ErrBodyRequired
|
||||
return req, rawBody, close, validate.ErrBodyRequired
|
||||
}
|
||||
|
||||
rawBody = append(rawBody, buf...)
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var request ScriptPolicyUpdate
|
||||
@@ -668,7 +1006,7 @@ func (s *Server) decodeUpdateScriptPolicyRequest(r *http.Request) (
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return req, close, err
|
||||
return req, rawBody, close, err
|
||||
}
|
||||
if err := func() error {
|
||||
if err := request.Validate(); err != nil {
|
||||
@@ -676,10 +1014,10 @@ func (s *Server) decodeUpdateScriptPolicyRequest(r *http.Request) (
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return req, close, errors.Wrap(err, "validate")
|
||||
return req, rawBody, close, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &request, close, nil
|
||||
return &request, rawBody, close, nil
|
||||
default:
|
||||
return req, close, validate.InvalidContentType(ct)
|
||||
return req, rawBody, close, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,10 +7,51 @@ import (
|
||||
"net/http"
|
||||
|
||||
"github.com/go-faster/jx"
|
||||
|
||||
ht "github.com/ogen-go/ogen/http"
|
||||
)
|
||||
|
||||
func encodeBatchAssetThumbnailsRequest(
|
||||
req *BatchAssetThumbnailsReq,
|
||||
r *http.Request,
|
||||
) error {
|
||||
const contentType = "application/json"
|
||||
e := new(jx.Encoder)
|
||||
{
|
||||
req.Encode(e)
|
||||
}
|
||||
encoded := e.Bytes()
|
||||
ht.SetBody(r, bytes.NewReader(encoded), contentType)
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeBatchUserThumbnailsRequest(
|
||||
req *BatchUserThumbnailsReq,
|
||||
r *http.Request,
|
||||
) error {
|
||||
const contentType = "application/json"
|
||||
e := new(jx.Encoder)
|
||||
{
|
||||
req.Encode(e)
|
||||
}
|
||||
encoded := e.Bytes()
|
||||
ht.SetBody(r, bytes.NewReader(encoded), contentType)
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeBatchUsernamesRequest(
|
||||
req *BatchUsernamesReq,
|
||||
r *http.Request,
|
||||
) error {
|
||||
const contentType = "application/json"
|
||||
e := new(jx.Encoder)
|
||||
{
|
||||
req.Encode(e)
|
||||
}
|
||||
encoded := e.Bytes()
|
||||
ht.SetBody(r, bytes.NewReader(encoded), contentType)
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeCreateMapfixRequest(
|
||||
req *MapfixTriggerCreate,
|
||||
r *http.Request,
|
||||
@@ -119,6 +160,16 @@ func encodeReleaseSubmissionsRequest(
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeUpdateMapfixDescriptionRequest(
|
||||
req UpdateMapfixDescriptionReq,
|
||||
r *http.Request,
|
||||
) error {
|
||||
const contentType = "text/plain"
|
||||
body := req
|
||||
ht.SetBody(r, body, contentType)
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeUpdateScriptRequest(
|
||||
req *ScriptUpdate,
|
||||
r *http.Request,
|
||||
|
||||
@@ -11,8 +11,9 @@ import (
|
||||
|
||||
"github.com/go-faster/errors"
|
||||
"github.com/go-faster/jx"
|
||||
|
||||
"github.com/ogen-go/ogen/conv"
|
||||
"github.com/ogen-go/ogen/ogenerrors"
|
||||
"github.com/ogen-go/ogen/uri"
|
||||
"github.com/ogen-go/ogen/validate"
|
||||
)
|
||||
|
||||
@@ -376,6 +377,66 @@ func decodeActionMapfixRevokeResponse(resp *http.Response) (res *ActionMapfixRev
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeActionMapfixTriggerReleaseResponse(resp *http.Response) (res *ActionMapfixTriggerReleaseNoContent, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 204:
|
||||
// Code 204.
|
||||
return &ActionMapfixTriggerReleaseNoContent{}, nil
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Error
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &ErrorStatusCode{
|
||||
StatusCode: resp.StatusCode,
|
||||
Response: response,
|
||||
}, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
return res, errors.Wrapf(err, "default (code %d)", resp.StatusCode)
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeActionMapfixTriggerSubmitResponse(resp *http.Response) (res *ActionMapfixTriggerSubmitNoContent, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 204:
|
||||
@@ -616,6 +677,66 @@ func decodeActionMapfixTriggerValidateResponse(resp *http.Response) (res *Action
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeActionMapfixUploadedResponse(resp *http.Response) (res *ActionMapfixUploadedNoContent, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 204:
|
||||
// Code 204.
|
||||
return &ActionMapfixUploadedNoContent{}, nil
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Error
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &ErrorStatusCode{
|
||||
StatusCode: resp.StatusCode,
|
||||
Response: response,
|
||||
}, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
return res, errors.Wrapf(err, "default (code %d)", resp.StatusCode)
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeActionMapfixValidatedResponse(resp *http.Response) (res *ActionMapfixValidatedNoContent, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 204:
|
||||
@@ -1336,6 +1457,282 @@ func decodeActionSubmissionValidatedResponse(resp *http.Response) (res *ActionSu
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeBatchAssetThumbnailsResponse(resp *http.Response) (res *BatchAssetThumbnailsOK, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 200:
|
||||
// Code 200.
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response BatchAssetThumbnailsOK
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
return &response, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Error
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &ErrorStatusCode{
|
||||
StatusCode: resp.StatusCode,
|
||||
Response: response,
|
||||
}, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
return res, errors.Wrapf(err, "default (code %d)", resp.StatusCode)
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeBatchUserThumbnailsResponse(resp *http.Response) (res *BatchUserThumbnailsOK, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 200:
|
||||
// Code 200.
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response BatchUserThumbnailsOK
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
return &response, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Error
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &ErrorStatusCode{
|
||||
StatusCode: resp.StatusCode,
|
||||
Response: response,
|
||||
}, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
return res, errors.Wrapf(err, "default (code %d)", resp.StatusCode)
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeBatchUsernamesResponse(resp *http.Response) (res *BatchUsernamesOK, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 200:
|
||||
// Code 200.
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response BatchUsernamesOK
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
return &response, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Error
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &ErrorStatusCode{
|
||||
StatusCode: resp.StatusCode,
|
||||
Response: response,
|
||||
}, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
return res, errors.Wrapf(err, "default (code %d)", resp.StatusCode)
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeCreateMapfixResponse(resp *http.Response) (res *OperationID, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 201:
|
||||
@@ -2157,6 +2554,105 @@ func decodeDownloadMapAssetResponse(resp *http.Response) (res DownloadMapAssetOK
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeGetAssetThumbnailResponse(resp *http.Response) (res *GetAssetThumbnailFound, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 302:
|
||||
// Code 302.
|
||||
var wrapper GetAssetThumbnailFound
|
||||
h := uri.NewHeaderDecoder(resp.Header)
|
||||
// Parse "Location" header.
|
||||
{
|
||||
cfg := uri.HeaderParameterDecodingConfig{
|
||||
Name: "Location",
|
||||
Explode: false,
|
||||
}
|
||||
if err := func() error {
|
||||
if err := h.HasParam(cfg); err == nil {
|
||||
if err := h.DecodeParam(cfg, func(d uri.Decoder) error {
|
||||
var wrapperDotLocationVal string
|
||||
if err := func() error {
|
||||
val, err := d.DecodeValue()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
c, err := conv.ToString(val)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
wrapperDotLocationVal = c
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return err
|
||||
}
|
||||
wrapper.Location.SetTo(wrapperDotLocationVal)
|
||||
return nil
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "parse Location header")
|
||||
}
|
||||
}
|
||||
return &wrapper, nil
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Error
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &ErrorStatusCode{
|
||||
StatusCode: resp.StatusCode,
|
||||
Response: response,
|
||||
}, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
return res, errors.Wrapf(err, "default (code %d)", resp.StatusCode)
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeGetMapResponse(resp *http.Response) (res *Map, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 200:
|
||||
@@ -2662,6 +3158,107 @@ func decodeGetScriptPolicyResponse(resp *http.Response) (res *ScriptPolicy, _ er
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeGetStatsResponse(resp *http.Response) (res *Stats, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 200:
|
||||
// Code 200.
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Stats
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &response, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Error
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &ErrorStatusCode{
|
||||
StatusCode: resp.StatusCode,
|
||||
Response: response,
|
||||
}, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
return res, errors.Wrapf(err, "default (code %d)", resp.StatusCode)
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeGetSubmissionResponse(resp *http.Response) (res *Submission, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 200:
|
||||
@@ -2763,6 +3360,105 @@ func decodeGetSubmissionResponse(resp *http.Response) (res *Submission, _ error)
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeGetUserThumbnailResponse(resp *http.Response) (res *GetUserThumbnailFound, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 302:
|
||||
// Code 302.
|
||||
var wrapper GetUserThumbnailFound
|
||||
h := uri.NewHeaderDecoder(resp.Header)
|
||||
// Parse "Location" header.
|
||||
{
|
||||
cfg := uri.HeaderParameterDecodingConfig{
|
||||
Name: "Location",
|
||||
Explode: false,
|
||||
}
|
||||
if err := func() error {
|
||||
if err := h.HasParam(cfg); err == nil {
|
||||
if err := h.DecodeParam(cfg, func(d uri.Decoder) error {
|
||||
var wrapperDotLocationVal string
|
||||
if err := func() error {
|
||||
val, err := d.DecodeValue()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
c, err := conv.ToString(val)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
wrapperDotLocationVal = c
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return err
|
||||
}
|
||||
wrapper.Location.SetTo(wrapperDotLocationVal)
|
||||
return nil
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "parse Location header")
|
||||
}
|
||||
}
|
||||
return &wrapper, nil
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Error
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &ErrorStatusCode{
|
||||
StatusCode: resp.StatusCode,
|
||||
Response: response,
|
||||
}, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
return res, errors.Wrapf(err, "default (code %d)", resp.StatusCode)
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeListMapfixAuditEventsResponse(resp *http.Response) (res []AuditEvent, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 200:
|
||||
@@ -3595,11 +4291,52 @@ func decodeListSubmissionsResponse(resp *http.Response) (res *Submissions, _ err
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeReleaseSubmissionsResponse(resp *http.Response) (res *ReleaseSubmissionsCreated, _ error) {
|
||||
func decodeReleaseSubmissionsResponse(resp *http.Response) (res *OperationID, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 201:
|
||||
// Code 201.
|
||||
return &ReleaseSubmissionsCreated{}, nil
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response OperationID
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &response, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
@@ -4071,6 +4808,66 @@ func decodeSetSubmissionCompletedResponse(resp *http.Response) (res *SetSubmissi
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeUpdateMapfixDescriptionResponse(resp *http.Response) (res *UpdateMapfixDescriptionNoContent, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 204:
|
||||
// Code 204.
|
||||
return &UpdateMapfixDescriptionNoContent{}, nil
|
||||
}
|
||||
// Convenient error response.
|
||||
defRes, err := func() (res *ErrorStatusCode, err error) {
|
||||
ct, _, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
return res, errors.Wrap(err, "parse media type")
|
||||
}
|
||||
switch {
|
||||
case ct == "application/json":
|
||||
buf, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
d := jx.DecodeBytes(buf)
|
||||
|
||||
var response Error
|
||||
if err := func() error {
|
||||
if err := response.Decode(d); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Skip(); err != io.EOF {
|
||||
return errors.New("unexpected trailing data")
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
err = &ogenerrors.DecodeBodyError{
|
||||
ContentType: ct,
|
||||
Body: buf,
|
||||
Err: err,
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
// Validate response.
|
||||
if err := func() error {
|
||||
if err := response.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return res, errors.Wrap(err, "validate")
|
||||
}
|
||||
return &ErrorStatusCode{
|
||||
StatusCode: resp.StatusCode,
|
||||
Response: response,
|
||||
}, nil
|
||||
default:
|
||||
return res, validate.InvalidContentType(ct)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
return res, errors.Wrapf(err, "default (code %d)", resp.StatusCode)
|
||||
}
|
||||
return res, errors.Wrap(defRes, "error")
|
||||
}
|
||||
|
||||
func decodeUpdateMapfixModelResponse(resp *http.Response) (res *UpdateMapfixModelNoContent, _ error) {
|
||||
switch resp.StatusCode {
|
||||
case 204:
|
||||
|
||||
@@ -8,10 +8,11 @@ import (
|
||||
|
||||
"github.com/go-faster/errors"
|
||||
"github.com/go-faster/jx"
|
||||
"github.com/ogen-go/ogen/conv"
|
||||
ht "github.com/ogen-go/ogen/http"
|
||||
"github.com/ogen-go/ogen/uri"
|
||||
"go.opentelemetry.io/otel/codes"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
|
||||
ht "github.com/ogen-go/ogen/http"
|
||||
)
|
||||
|
||||
func encodeActionMapfixAcceptedResponse(response *ActionMapfixAcceptedNoContent, w http.ResponseWriter, span trace.Span) error {
|
||||
@@ -56,6 +57,13 @@ func encodeActionMapfixRevokeResponse(response *ActionMapfixRevokeNoContent, w h
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeActionMapfixTriggerReleaseResponse(response *ActionMapfixTriggerReleaseNoContent, w http.ResponseWriter, span trace.Span) error {
|
||||
w.WriteHeader(204)
|
||||
span.SetStatus(codes.Ok, http.StatusText(204))
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeActionMapfixTriggerSubmitResponse(response *ActionMapfixTriggerSubmitNoContent, w http.ResponseWriter, span trace.Span) error {
|
||||
w.WriteHeader(204)
|
||||
span.SetStatus(codes.Ok, http.StatusText(204))
|
||||
@@ -84,6 +92,13 @@ func encodeActionMapfixTriggerValidateResponse(response *ActionMapfixTriggerVali
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeActionMapfixUploadedResponse(response *ActionMapfixUploadedNoContent, w http.ResponseWriter, span trace.Span) error {
|
||||
w.WriteHeader(204)
|
||||
span.SetStatus(codes.Ok, http.StatusText(204))
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeActionMapfixValidatedResponse(response *ActionMapfixValidatedNoContent, w http.ResponseWriter, span trace.Span) error {
|
||||
w.WriteHeader(204)
|
||||
span.SetStatus(codes.Ok, http.StatusText(204))
|
||||
@@ -168,6 +183,48 @@ func encodeActionSubmissionValidatedResponse(response *ActionSubmissionValidated
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeBatchAssetThumbnailsResponse(response *BatchAssetThumbnailsOK, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(200)
|
||||
span.SetStatus(codes.Ok, http.StatusText(200))
|
||||
|
||||
e := new(jx.Encoder)
|
||||
response.Encode(e)
|
||||
if _, err := e.WriteTo(w); err != nil {
|
||||
return errors.Wrap(err, "write")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeBatchUserThumbnailsResponse(response *BatchUserThumbnailsOK, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(200)
|
||||
span.SetStatus(codes.Ok, http.StatusText(200))
|
||||
|
||||
e := new(jx.Encoder)
|
||||
response.Encode(e)
|
||||
if _, err := e.WriteTo(w); err != nil {
|
||||
return errors.Wrap(err, "write")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeBatchUsernamesResponse(response *BatchUsernamesOK, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(200)
|
||||
span.SetStatus(codes.Ok, http.StatusText(200))
|
||||
|
||||
e := new(jx.Encoder)
|
||||
response.Encode(e)
|
||||
if _, err := e.WriteTo(w); err != nil {
|
||||
return errors.Wrap(err, "write")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeCreateMapfixResponse(response *OperationID, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(201)
|
||||
@@ -282,6 +339,32 @@ func encodeDownloadMapAssetResponse(response DownloadMapAssetOK, w http.Response
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeGetAssetThumbnailResponse(response *GetAssetThumbnailFound, w http.ResponseWriter, span trace.Span) error {
|
||||
// Encoding response headers.
|
||||
{
|
||||
h := uri.NewHeaderEncoder(w.Header())
|
||||
// Encode "Location" header.
|
||||
{
|
||||
cfg := uri.HeaderParameterEncodingConfig{
|
||||
Name: "Location",
|
||||
Explode: false,
|
||||
}
|
||||
if err := h.EncodeParam(cfg, func(e uri.Encoder) error {
|
||||
if val, ok := response.Location.Get(); ok {
|
||||
return e.EncodeValue(conv.StringToString(val))
|
||||
}
|
||||
return nil
|
||||
}); err != nil {
|
||||
return errors.Wrap(err, "encode Location header")
|
||||
}
|
||||
}
|
||||
}
|
||||
w.WriteHeader(302)
|
||||
span.SetStatus(codes.Ok, http.StatusText(302))
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeGetMapResponse(response *Map, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(200)
|
||||
@@ -352,6 +435,20 @@ func encodeGetScriptPolicyResponse(response *ScriptPolicy, w http.ResponseWriter
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeGetStatsResponse(response *Stats, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(200)
|
||||
span.SetStatus(codes.Ok, http.StatusText(200))
|
||||
|
||||
e := new(jx.Encoder)
|
||||
response.Encode(e)
|
||||
if _, err := e.WriteTo(w); err != nil {
|
||||
return errors.Wrap(err, "write")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeGetSubmissionResponse(response *Submission, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(200)
|
||||
@@ -366,6 +463,32 @@ func encodeGetSubmissionResponse(response *Submission, w http.ResponseWriter, sp
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeGetUserThumbnailResponse(response *GetUserThumbnailFound, w http.ResponseWriter, span trace.Span) error {
|
||||
// Encoding response headers.
|
||||
{
|
||||
h := uri.NewHeaderEncoder(w.Header())
|
||||
// Encode "Location" header.
|
||||
{
|
||||
cfg := uri.HeaderParameterEncodingConfig{
|
||||
Name: "Location",
|
||||
Explode: false,
|
||||
}
|
||||
if err := h.EncodeParam(cfg, func(e uri.Encoder) error {
|
||||
if val, ok := response.Location.Get(); ok {
|
||||
return e.EncodeValue(conv.StringToString(val))
|
||||
}
|
||||
return nil
|
||||
}); err != nil {
|
||||
return errors.Wrap(err, "encode Location header")
|
||||
}
|
||||
}
|
||||
}
|
||||
w.WriteHeader(302)
|
||||
span.SetStatus(codes.Ok, http.StatusText(302))
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeListMapfixAuditEventsResponse(response []AuditEvent, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(200)
|
||||
@@ -484,10 +607,17 @@ func encodeListSubmissionsResponse(response *Submissions, w http.ResponseWriter,
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeReleaseSubmissionsResponse(response *ReleaseSubmissionsCreated, w http.ResponseWriter, span trace.Span) error {
|
||||
func encodeReleaseSubmissionsResponse(response *OperationID, w http.ResponseWriter, span trace.Span) error {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(201)
|
||||
span.SetStatus(codes.Ok, http.StatusText(201))
|
||||
|
||||
e := new(jx.Encoder)
|
||||
response.Encode(e)
|
||||
if _, err := e.WriteTo(w); err != nil {
|
||||
return errors.Wrap(err, "write")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -547,6 +677,13 @@ func encodeSetSubmissionCompletedResponse(response *SetSubmissionCompletedNoCont
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeUpdateMapfixDescriptionResponse(response *UpdateMapfixDescriptionNoContent, w http.ResponseWriter, span trace.Span) error {
|
||||
w.WriteHeader(204)
|
||||
span.SetStatus(codes.Ok, http.StatusText(204))
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func encodeUpdateMapfixModelResponse(response *UpdateMapfixModelNoContent, w http.ResponseWriter, span trace.Span) error {
|
||||
w.WriteHeader(204)
|
||||
span.SetStatus(codes.Ok, http.StatusText(204))
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -7,6 +7,7 @@ import (
|
||||
"io"
|
||||
"time"
|
||||
|
||||
"github.com/go-faster/errors"
|
||||
"github.com/go-faster/jx"
|
||||
)
|
||||
|
||||
@@ -32,6 +33,9 @@ type ActionMapfixRetryValidateNoContent struct{}
|
||||
// ActionMapfixRevokeNoContent is response for ActionMapfixRevoke operation.
|
||||
type ActionMapfixRevokeNoContent struct{}
|
||||
|
||||
// ActionMapfixTriggerReleaseNoContent is response for ActionMapfixTriggerRelease operation.
|
||||
type ActionMapfixTriggerReleaseNoContent struct{}
|
||||
|
||||
// ActionMapfixTriggerSubmitNoContent is response for ActionMapfixTriggerSubmit operation.
|
||||
type ActionMapfixTriggerSubmitNoContent struct{}
|
||||
|
||||
@@ -44,6 +48,9 @@ type ActionMapfixTriggerUploadNoContent struct{}
|
||||
// ActionMapfixTriggerValidateNoContent is response for ActionMapfixTriggerValidate operation.
|
||||
type ActionMapfixTriggerValidateNoContent struct{}
|
||||
|
||||
// ActionMapfixUploadedNoContent is response for ActionMapfixUploaded operation.
|
||||
type ActionMapfixUploadedNoContent struct{}
|
||||
|
||||
// ActionMapfixValidatedNoContent is response for ActionMapfixValidated operation.
|
||||
type ActionMapfixValidatedNoContent struct{}
|
||||
|
||||
@@ -186,6 +193,254 @@ func (s *AuditEventEventData) init() AuditEventEventData {
|
||||
return m
|
||||
}
|
||||
|
||||
type BatchAssetThumbnailsOK struct {
|
||||
// Map of asset ID to thumbnail URL.
|
||||
Thumbnails OptBatchAssetThumbnailsOKThumbnails `json:"thumbnails"`
|
||||
}
|
||||
|
||||
// GetThumbnails returns the value of Thumbnails.
|
||||
func (s *BatchAssetThumbnailsOK) GetThumbnails() OptBatchAssetThumbnailsOKThumbnails {
|
||||
return s.Thumbnails
|
||||
}
|
||||
|
||||
// SetThumbnails sets the value of Thumbnails.
|
||||
func (s *BatchAssetThumbnailsOK) SetThumbnails(val OptBatchAssetThumbnailsOKThumbnails) {
|
||||
s.Thumbnails = val
|
||||
}
|
||||
|
||||
// Map of asset ID to thumbnail URL.
|
||||
type BatchAssetThumbnailsOKThumbnails map[string]string
|
||||
|
||||
func (s *BatchAssetThumbnailsOKThumbnails) init() BatchAssetThumbnailsOKThumbnails {
|
||||
m := *s
|
||||
if m == nil {
|
||||
m = map[string]string{}
|
||||
*s = m
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
type BatchAssetThumbnailsReq struct {
|
||||
// Array of asset IDs (max 100).
|
||||
AssetIds []uint64 `json:"assetIds"`
|
||||
// Thumbnail size.
|
||||
Size OptBatchAssetThumbnailsReqSize `json:"size"`
|
||||
}
|
||||
|
||||
// GetAssetIds returns the value of AssetIds.
|
||||
func (s *BatchAssetThumbnailsReq) GetAssetIds() []uint64 {
|
||||
return s.AssetIds
|
||||
}
|
||||
|
||||
// GetSize returns the value of Size.
|
||||
func (s *BatchAssetThumbnailsReq) GetSize() OptBatchAssetThumbnailsReqSize {
|
||||
return s.Size
|
||||
}
|
||||
|
||||
// SetAssetIds sets the value of AssetIds.
|
||||
func (s *BatchAssetThumbnailsReq) SetAssetIds(val []uint64) {
|
||||
s.AssetIds = val
|
||||
}
|
||||
|
||||
// SetSize sets the value of Size.
|
||||
func (s *BatchAssetThumbnailsReq) SetSize(val OptBatchAssetThumbnailsReqSize) {
|
||||
s.Size = val
|
||||
}
|
||||
|
||||
// Thumbnail size.
|
||||
type BatchAssetThumbnailsReqSize string
|
||||
|
||||
const (
|
||||
BatchAssetThumbnailsReqSize150x150 BatchAssetThumbnailsReqSize = "150x150"
|
||||
BatchAssetThumbnailsReqSize420x420 BatchAssetThumbnailsReqSize = "420x420"
|
||||
BatchAssetThumbnailsReqSize768x432 BatchAssetThumbnailsReqSize = "768x432"
|
||||
)
|
||||
|
||||
// AllValues returns all BatchAssetThumbnailsReqSize values.
|
||||
func (BatchAssetThumbnailsReqSize) AllValues() []BatchAssetThumbnailsReqSize {
|
||||
return []BatchAssetThumbnailsReqSize{
|
||||
BatchAssetThumbnailsReqSize150x150,
|
||||
BatchAssetThumbnailsReqSize420x420,
|
||||
BatchAssetThumbnailsReqSize768x432,
|
||||
}
|
||||
}
|
||||
|
||||
// MarshalText implements encoding.TextMarshaler.
|
||||
func (s BatchAssetThumbnailsReqSize) MarshalText() ([]byte, error) {
|
||||
switch s {
|
||||
case BatchAssetThumbnailsReqSize150x150:
|
||||
return []byte(s), nil
|
||||
case BatchAssetThumbnailsReqSize420x420:
|
||||
return []byte(s), nil
|
||||
case BatchAssetThumbnailsReqSize768x432:
|
||||
return []byte(s), nil
|
||||
default:
|
||||
return nil, errors.Errorf("invalid value: %q", s)
|
||||
}
|
||||
}
|
||||
|
||||
// UnmarshalText implements encoding.TextUnmarshaler.
|
||||
func (s *BatchAssetThumbnailsReqSize) UnmarshalText(data []byte) error {
|
||||
switch BatchAssetThumbnailsReqSize(data) {
|
||||
case BatchAssetThumbnailsReqSize150x150:
|
||||
*s = BatchAssetThumbnailsReqSize150x150
|
||||
return nil
|
||||
case BatchAssetThumbnailsReqSize420x420:
|
||||
*s = BatchAssetThumbnailsReqSize420x420
|
||||
return nil
|
||||
case BatchAssetThumbnailsReqSize768x432:
|
||||
*s = BatchAssetThumbnailsReqSize768x432
|
||||
return nil
|
||||
default:
|
||||
return errors.Errorf("invalid value: %q", data)
|
||||
}
|
||||
}
|
||||
|
||||
type BatchUserThumbnailsOK struct {
|
||||
// Map of user ID to thumbnail URL.
|
||||
Thumbnails OptBatchUserThumbnailsOKThumbnails `json:"thumbnails"`
|
||||
}
|
||||
|
||||
// GetThumbnails returns the value of Thumbnails.
|
||||
func (s *BatchUserThumbnailsOK) GetThumbnails() OptBatchUserThumbnailsOKThumbnails {
|
||||
return s.Thumbnails
|
||||
}
|
||||
|
||||
// SetThumbnails sets the value of Thumbnails.
|
||||
func (s *BatchUserThumbnailsOK) SetThumbnails(val OptBatchUserThumbnailsOKThumbnails) {
|
||||
s.Thumbnails = val
|
||||
}
|
||||
|
||||
// Map of user ID to thumbnail URL.
|
||||
type BatchUserThumbnailsOKThumbnails map[string]string
|
||||
|
||||
func (s *BatchUserThumbnailsOKThumbnails) init() BatchUserThumbnailsOKThumbnails {
|
||||
m := *s
|
||||
if m == nil {
|
||||
m = map[string]string{}
|
||||
*s = m
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
type BatchUserThumbnailsReq struct {
|
||||
// Array of user IDs (max 100).
|
||||
UserIds []uint64 `json:"userIds"`
|
||||
// Thumbnail size.
|
||||
Size OptBatchUserThumbnailsReqSize `json:"size"`
|
||||
}
|
||||
|
||||
// GetUserIds returns the value of UserIds.
|
||||
func (s *BatchUserThumbnailsReq) GetUserIds() []uint64 {
|
||||
return s.UserIds
|
||||
}
|
||||
|
||||
// GetSize returns the value of Size.
|
||||
func (s *BatchUserThumbnailsReq) GetSize() OptBatchUserThumbnailsReqSize {
|
||||
return s.Size
|
||||
}
|
||||
|
||||
// SetUserIds sets the value of UserIds.
|
||||
func (s *BatchUserThumbnailsReq) SetUserIds(val []uint64) {
|
||||
s.UserIds = val
|
||||
}
|
||||
|
||||
// SetSize sets the value of Size.
|
||||
func (s *BatchUserThumbnailsReq) SetSize(val OptBatchUserThumbnailsReqSize) {
|
||||
s.Size = val
|
||||
}
|
||||
|
||||
// Thumbnail size.
|
||||
type BatchUserThumbnailsReqSize string
|
||||
|
||||
const (
|
||||
BatchUserThumbnailsReqSize150x150 BatchUserThumbnailsReqSize = "150x150"
|
||||
BatchUserThumbnailsReqSize420x420 BatchUserThumbnailsReqSize = "420x420"
|
||||
BatchUserThumbnailsReqSize768x432 BatchUserThumbnailsReqSize = "768x432"
|
||||
)
|
||||
|
||||
// AllValues returns all BatchUserThumbnailsReqSize values.
|
||||
func (BatchUserThumbnailsReqSize) AllValues() []BatchUserThumbnailsReqSize {
|
||||
return []BatchUserThumbnailsReqSize{
|
||||
BatchUserThumbnailsReqSize150x150,
|
||||
BatchUserThumbnailsReqSize420x420,
|
||||
BatchUserThumbnailsReqSize768x432,
|
||||
}
|
||||
}
|
||||
|
||||
// MarshalText implements encoding.TextMarshaler.
|
||||
func (s BatchUserThumbnailsReqSize) MarshalText() ([]byte, error) {
|
||||
switch s {
|
||||
case BatchUserThumbnailsReqSize150x150:
|
||||
return []byte(s), nil
|
||||
case BatchUserThumbnailsReqSize420x420:
|
||||
return []byte(s), nil
|
||||
case BatchUserThumbnailsReqSize768x432:
|
||||
return []byte(s), nil
|
||||
default:
|
||||
return nil, errors.Errorf("invalid value: %q", s)
|
||||
}
|
||||
}
|
||||
|
||||
// UnmarshalText implements encoding.TextUnmarshaler.
|
||||
func (s *BatchUserThumbnailsReqSize) UnmarshalText(data []byte) error {
|
||||
switch BatchUserThumbnailsReqSize(data) {
|
||||
case BatchUserThumbnailsReqSize150x150:
|
||||
*s = BatchUserThumbnailsReqSize150x150
|
||||
return nil
|
||||
case BatchUserThumbnailsReqSize420x420:
|
||||
*s = BatchUserThumbnailsReqSize420x420
|
||||
return nil
|
||||
case BatchUserThumbnailsReqSize768x432:
|
||||
*s = BatchUserThumbnailsReqSize768x432
|
||||
return nil
|
||||
default:
|
||||
return errors.Errorf("invalid value: %q", data)
|
||||
}
|
||||
}
|
||||
|
||||
type BatchUsernamesOK struct {
|
||||
// Map of user ID to username.
|
||||
Usernames OptBatchUsernamesOKUsernames `json:"usernames"`
|
||||
}
|
||||
|
||||
// GetUsernames returns the value of Usernames.
|
||||
func (s *BatchUsernamesOK) GetUsernames() OptBatchUsernamesOKUsernames {
|
||||
return s.Usernames
|
||||
}
|
||||
|
||||
// SetUsernames sets the value of Usernames.
|
||||
func (s *BatchUsernamesOK) SetUsernames(val OptBatchUsernamesOKUsernames) {
|
||||
s.Usernames = val
|
||||
}
|
||||
|
||||
// Map of user ID to username.
|
||||
type BatchUsernamesOKUsernames map[string]string
|
||||
|
||||
func (s *BatchUsernamesOKUsernames) init() BatchUsernamesOKUsernames {
|
||||
m := *s
|
||||
if m == nil {
|
||||
m = map[string]string{}
|
||||
*s = m
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
type BatchUsernamesReq struct {
|
||||
// Array of user IDs (max 100).
|
||||
UserIds []uint64 `json:"userIds"`
|
||||
}
|
||||
|
||||
// GetUserIds returns the value of UserIds.
|
||||
func (s *BatchUsernamesReq) GetUserIds() []uint64 {
|
||||
return s.UserIds
|
||||
}
|
||||
|
||||
// SetUserIds sets the value of UserIds.
|
||||
func (s *BatchUsernamesReq) SetUserIds(val []uint64) {
|
||||
s.UserIds = val
|
||||
}
|
||||
|
||||
type CookieAuth struct {
|
||||
APIKey string
|
||||
Roles []string
|
||||
@@ -318,6 +573,132 @@ func (s *ErrorStatusCode) SetResponse(val Error) {
|
||||
s.Response = val
|
||||
}
|
||||
|
||||
// GetAssetThumbnailFound is response for GetAssetThumbnail operation.
|
||||
type GetAssetThumbnailFound struct {
|
||||
Location OptString
|
||||
}
|
||||
|
||||
// GetLocation returns the value of Location.
|
||||
func (s *GetAssetThumbnailFound) GetLocation() OptString {
|
||||
return s.Location
|
||||
}
|
||||
|
||||
// SetLocation sets the value of Location.
|
||||
func (s *GetAssetThumbnailFound) SetLocation(val OptString) {
|
||||
s.Location = val
|
||||
}
|
||||
|
||||
type GetAssetThumbnailSize string
|
||||
|
||||
const (
|
||||
GetAssetThumbnailSize150x150 GetAssetThumbnailSize = "150x150"
|
||||
GetAssetThumbnailSize420x420 GetAssetThumbnailSize = "420x420"
|
||||
GetAssetThumbnailSize768x432 GetAssetThumbnailSize = "768x432"
|
||||
)
|
||||
|
||||
// AllValues returns all GetAssetThumbnailSize values.
|
||||
func (GetAssetThumbnailSize) AllValues() []GetAssetThumbnailSize {
|
||||
return []GetAssetThumbnailSize{
|
||||
GetAssetThumbnailSize150x150,
|
||||
GetAssetThumbnailSize420x420,
|
||||
GetAssetThumbnailSize768x432,
|
||||
}
|
||||
}
|
||||
|
||||
// MarshalText implements encoding.TextMarshaler.
|
||||
func (s GetAssetThumbnailSize) MarshalText() ([]byte, error) {
|
||||
switch s {
|
||||
case GetAssetThumbnailSize150x150:
|
||||
return []byte(s), nil
|
||||
case GetAssetThumbnailSize420x420:
|
||||
return []byte(s), nil
|
||||
case GetAssetThumbnailSize768x432:
|
||||
return []byte(s), nil
|
||||
default:
|
||||
return nil, errors.Errorf("invalid value: %q", s)
|
||||
}
|
||||
}
|
||||
|
||||
// UnmarshalText implements encoding.TextUnmarshaler.
|
||||
func (s *GetAssetThumbnailSize) UnmarshalText(data []byte) error {
|
||||
switch GetAssetThumbnailSize(data) {
|
||||
case GetAssetThumbnailSize150x150:
|
||||
*s = GetAssetThumbnailSize150x150
|
||||
return nil
|
||||
case GetAssetThumbnailSize420x420:
|
||||
*s = GetAssetThumbnailSize420x420
|
||||
return nil
|
||||
case GetAssetThumbnailSize768x432:
|
||||
*s = GetAssetThumbnailSize768x432
|
||||
return nil
|
||||
default:
|
||||
return errors.Errorf("invalid value: %q", data)
|
||||
}
|
||||
}
|
||||
|
||||
// GetUserThumbnailFound is response for GetUserThumbnail operation.
|
||||
type GetUserThumbnailFound struct {
|
||||
Location OptString
|
||||
}
|
||||
|
||||
// GetLocation returns the value of Location.
|
||||
func (s *GetUserThumbnailFound) GetLocation() OptString {
|
||||
return s.Location
|
||||
}
|
||||
|
||||
// SetLocation sets the value of Location.
|
||||
func (s *GetUserThumbnailFound) SetLocation(val OptString) {
|
||||
s.Location = val
|
||||
}
|
||||
|
||||
type GetUserThumbnailSize string
|
||||
|
||||
const (
|
||||
GetUserThumbnailSize150x150 GetUserThumbnailSize = "150x150"
|
||||
GetUserThumbnailSize420x420 GetUserThumbnailSize = "420x420"
|
||||
GetUserThumbnailSize768x432 GetUserThumbnailSize = "768x432"
|
||||
)
|
||||
|
||||
// AllValues returns all GetUserThumbnailSize values.
|
||||
func (GetUserThumbnailSize) AllValues() []GetUserThumbnailSize {
|
||||
return []GetUserThumbnailSize{
|
||||
GetUserThumbnailSize150x150,
|
||||
GetUserThumbnailSize420x420,
|
||||
GetUserThumbnailSize768x432,
|
||||
}
|
||||
}
|
||||
|
||||
// MarshalText implements encoding.TextMarshaler.
|
||||
func (s GetUserThumbnailSize) MarshalText() ([]byte, error) {
|
||||
switch s {
|
||||
case GetUserThumbnailSize150x150:
|
||||
return []byte(s), nil
|
||||
case GetUserThumbnailSize420x420:
|
||||
return []byte(s), nil
|
||||
case GetUserThumbnailSize768x432:
|
||||
return []byte(s), nil
|
||||
default:
|
||||
return nil, errors.Errorf("invalid value: %q", s)
|
||||
}
|
||||
}
|
||||
|
||||
// UnmarshalText implements encoding.TextUnmarshaler.
|
||||
func (s *GetUserThumbnailSize) UnmarshalText(data []byte) error {
|
||||
switch GetUserThumbnailSize(data) {
|
||||
case GetUserThumbnailSize150x150:
|
||||
*s = GetUserThumbnailSize150x150
|
||||
return nil
|
||||
case GetUserThumbnailSize420x420:
|
||||
*s = GetUserThumbnailSize420x420
|
||||
return nil
|
||||
case GetUserThumbnailSize768x432:
|
||||
*s = GetUserThumbnailSize768x432
|
||||
return nil
|
||||
default:
|
||||
return errors.Errorf("invalid value: %q", data)
|
||||
}
|
||||
}
|
||||
|
||||
// Ref: #/components/schemas/Map
|
||||
type Map struct {
|
||||
ID int64 `json:"ID"`
|
||||
@@ -456,19 +837,21 @@ func (s *Map) SetModes(val uint32) {
|
||||
|
||||
// Ref: #/components/schemas/Mapfix
|
||||
type Mapfix struct {
|
||||
ID int64 `json:"ID"`
|
||||
DisplayName string `json:"DisplayName"`
|
||||
Creator string `json:"Creator"`
|
||||
GameID int32 `json:"GameID"`
|
||||
CreatedAt int64 `json:"CreatedAt"`
|
||||
UpdatedAt int64 `json:"UpdatedAt"`
|
||||
Submitter int64 `json:"Submitter"`
|
||||
AssetID int64 `json:"AssetID"`
|
||||
AssetVersion int64 `json:"AssetVersion"`
|
||||
Completed bool `json:"Completed"`
|
||||
TargetAssetID int64 `json:"TargetAssetID"`
|
||||
StatusID int32 `json:"StatusID"`
|
||||
Description string `json:"Description"`
|
||||
ID int64 `json:"ID"`
|
||||
DisplayName string `json:"DisplayName"`
|
||||
Creator string `json:"Creator"`
|
||||
GameID int32 `json:"GameID"`
|
||||
CreatedAt int64 `json:"CreatedAt"`
|
||||
UpdatedAt int64 `json:"UpdatedAt"`
|
||||
Submitter int64 `json:"Submitter"`
|
||||
AssetID int64 `json:"AssetID"`
|
||||
AssetVersion int64 `json:"AssetVersion"`
|
||||
ValidatedAssetID OptInt64 `json:"ValidatedAssetID"`
|
||||
ValidatedAssetVersion OptInt64 `json:"ValidatedAssetVersion"`
|
||||
Completed bool `json:"Completed"`
|
||||
TargetAssetID int64 `json:"TargetAssetID"`
|
||||
StatusID int32 `json:"StatusID"`
|
||||
Description string `json:"Description"`
|
||||
}
|
||||
|
||||
// GetID returns the value of ID.
|
||||
@@ -516,6 +899,16 @@ func (s *Mapfix) GetAssetVersion() int64 {
|
||||
return s.AssetVersion
|
||||
}
|
||||
|
||||
// GetValidatedAssetID returns the value of ValidatedAssetID.
|
||||
func (s *Mapfix) GetValidatedAssetID() OptInt64 {
|
||||
return s.ValidatedAssetID
|
||||
}
|
||||
|
||||
// GetValidatedAssetVersion returns the value of ValidatedAssetVersion.
|
||||
func (s *Mapfix) GetValidatedAssetVersion() OptInt64 {
|
||||
return s.ValidatedAssetVersion
|
||||
}
|
||||
|
||||
// GetCompleted returns the value of Completed.
|
||||
func (s *Mapfix) GetCompleted() bool {
|
||||
return s.Completed
|
||||
@@ -581,6 +974,16 @@ func (s *Mapfix) SetAssetVersion(val int64) {
|
||||
s.AssetVersion = val
|
||||
}
|
||||
|
||||
// SetValidatedAssetID sets the value of ValidatedAssetID.
|
||||
func (s *Mapfix) SetValidatedAssetID(val OptInt64) {
|
||||
s.ValidatedAssetID = val
|
||||
}
|
||||
|
||||
// SetValidatedAssetVersion sets the value of ValidatedAssetVersion.
|
||||
func (s *Mapfix) SetValidatedAssetVersion(val OptInt64) {
|
||||
s.ValidatedAssetVersion = val
|
||||
}
|
||||
|
||||
// SetCompleted sets the value of Completed.
|
||||
func (s *Mapfix) SetCompleted(val bool) {
|
||||
s.Completed = val
|
||||
@@ -749,6 +1152,328 @@ func (s *OperationID) SetOperationID(val int32) {
|
||||
s.OperationID = val
|
||||
}
|
||||
|
||||
// NewOptBatchAssetThumbnailsOKThumbnails returns new OptBatchAssetThumbnailsOKThumbnails with value set to v.
|
||||
func NewOptBatchAssetThumbnailsOKThumbnails(v BatchAssetThumbnailsOKThumbnails) OptBatchAssetThumbnailsOKThumbnails {
|
||||
return OptBatchAssetThumbnailsOKThumbnails{
|
||||
Value: v,
|
||||
Set: true,
|
||||
}
|
||||
}
|
||||
|
||||
// OptBatchAssetThumbnailsOKThumbnails is optional BatchAssetThumbnailsOKThumbnails.
|
||||
type OptBatchAssetThumbnailsOKThumbnails struct {
|
||||
Value BatchAssetThumbnailsOKThumbnails
|
||||
Set bool
|
||||
}
|
||||
|
||||
// IsSet returns true if OptBatchAssetThumbnailsOKThumbnails was set.
|
||||
func (o OptBatchAssetThumbnailsOKThumbnails) IsSet() bool { return o.Set }
|
||||
|
||||
// Reset unsets value.
|
||||
func (o *OptBatchAssetThumbnailsOKThumbnails) Reset() {
|
||||
var v BatchAssetThumbnailsOKThumbnails
|
||||
o.Value = v
|
||||
o.Set = false
|
||||
}
|
||||
|
||||
// SetTo sets value to v.
|
||||
func (o *OptBatchAssetThumbnailsOKThumbnails) SetTo(v BatchAssetThumbnailsOKThumbnails) {
|
||||
o.Set = true
|
||||
o.Value = v
|
||||
}
|
||||
|
||||
// Get returns value and boolean that denotes whether value was set.
|
||||
func (o OptBatchAssetThumbnailsOKThumbnails) Get() (v BatchAssetThumbnailsOKThumbnails, ok bool) {
|
||||
if !o.Set {
|
||||
return v, false
|
||||
}
|
||||
return o.Value, true
|
||||
}
|
||||
|
||||
// Or returns value if set, or given parameter if does not.
|
||||
func (o OptBatchAssetThumbnailsOKThumbnails) Or(d BatchAssetThumbnailsOKThumbnails) BatchAssetThumbnailsOKThumbnails {
|
||||
if v, ok := o.Get(); ok {
|
||||
return v
|
||||
}
|
||||
return d
|
||||
}
|
||||
|
||||
// NewOptBatchAssetThumbnailsReqSize returns new OptBatchAssetThumbnailsReqSize with value set to v.
|
||||
func NewOptBatchAssetThumbnailsReqSize(v BatchAssetThumbnailsReqSize) OptBatchAssetThumbnailsReqSize {
|
||||
return OptBatchAssetThumbnailsReqSize{
|
||||
Value: v,
|
||||
Set: true,
|
||||
}
|
||||
}
|
||||
|
||||
// OptBatchAssetThumbnailsReqSize is optional BatchAssetThumbnailsReqSize.
|
||||
type OptBatchAssetThumbnailsReqSize struct {
|
||||
Value BatchAssetThumbnailsReqSize
|
||||
Set bool
|
||||
}
|
||||
|
||||
// IsSet returns true if OptBatchAssetThumbnailsReqSize was set.
|
||||
func (o OptBatchAssetThumbnailsReqSize) IsSet() bool { return o.Set }
|
||||
|
||||
// Reset unsets value.
|
||||
func (o *OptBatchAssetThumbnailsReqSize) Reset() {
|
||||
var v BatchAssetThumbnailsReqSize
|
||||
o.Value = v
|
||||
o.Set = false
|
||||
}
|
||||
|
||||
// SetTo sets value to v.
|
||||
func (o *OptBatchAssetThumbnailsReqSize) SetTo(v BatchAssetThumbnailsReqSize) {
|
||||
o.Set = true
|
||||
o.Value = v
|
||||
}
|
||||
|
||||
// Get returns value and boolean that denotes whether value was set.
|
||||
func (o OptBatchAssetThumbnailsReqSize) Get() (v BatchAssetThumbnailsReqSize, ok bool) {
|
||||
if !o.Set {
|
||||
return v, false
|
||||
}
|
||||
return o.Value, true
|
||||
}
|
||||
|
||||
// Or returns value if set, or given parameter if does not.
|
||||
func (o OptBatchAssetThumbnailsReqSize) Or(d BatchAssetThumbnailsReqSize) BatchAssetThumbnailsReqSize {
|
||||
if v, ok := o.Get(); ok {
|
||||
return v
|
||||
}
|
||||
return d
|
||||
}
|
||||
|
||||
// NewOptBatchUserThumbnailsOKThumbnails returns new OptBatchUserThumbnailsOKThumbnails with value set to v.
|
||||
func NewOptBatchUserThumbnailsOKThumbnails(v BatchUserThumbnailsOKThumbnails) OptBatchUserThumbnailsOKThumbnails {
|
||||
return OptBatchUserThumbnailsOKThumbnails{
|
||||
Value: v,
|
||||
Set: true,
|
||||
}
|
||||
}
|
||||
|
||||
// OptBatchUserThumbnailsOKThumbnails is optional BatchUserThumbnailsOKThumbnails.
|
||||
type OptBatchUserThumbnailsOKThumbnails struct {
|
||||
Value BatchUserThumbnailsOKThumbnails
|
||||
Set bool
|
||||
}
|
||||
|
||||
// IsSet returns true if OptBatchUserThumbnailsOKThumbnails was set.
|
||||
func (o OptBatchUserThumbnailsOKThumbnails) IsSet() bool { return o.Set }
|
||||
|
||||
// Reset unsets value.
|
||||
func (o *OptBatchUserThumbnailsOKThumbnails) Reset() {
|
||||
var v BatchUserThumbnailsOKThumbnails
|
||||
o.Value = v
|
||||
o.Set = false
|
||||
}
|
||||
|
||||
// SetTo sets value to v.
|
||||
func (o *OptBatchUserThumbnailsOKThumbnails) SetTo(v BatchUserThumbnailsOKThumbnails) {
|
||||
o.Set = true
|
||||
o.Value = v
|
||||
}
|
||||
|
||||
// Get returns value and boolean that denotes whether value was set.
|
||||
func (o OptBatchUserThumbnailsOKThumbnails) Get() (v BatchUserThumbnailsOKThumbnails, ok bool) {
|
||||
if !o.Set {
|
||||
return v, false
|
||||
}
|
||||
return o.Value, true
|
||||
}
|
||||
|
||||
// Or returns value if set, or given parameter if does not.
|
||||
func (o OptBatchUserThumbnailsOKThumbnails) Or(d BatchUserThumbnailsOKThumbnails) BatchUserThumbnailsOKThumbnails {
|
||||
if v, ok := o.Get(); ok {
|
||||
return v
|
||||
}
|
||||
return d
|
||||
}
|
||||
|
||||
// NewOptBatchUserThumbnailsReqSize returns new OptBatchUserThumbnailsReqSize with value set to v.
|
||||
func NewOptBatchUserThumbnailsReqSize(v BatchUserThumbnailsReqSize) OptBatchUserThumbnailsReqSize {
|
||||
return OptBatchUserThumbnailsReqSize{
|
||||
Value: v,
|
||||
Set: true,
|
||||
}
|
||||
}
|
||||
|
||||
// OptBatchUserThumbnailsReqSize is optional BatchUserThumbnailsReqSize.
|
||||
type OptBatchUserThumbnailsReqSize struct {
|
||||
Value BatchUserThumbnailsReqSize
|
||||
Set bool
|
||||
}
|
||||
|
||||
// IsSet returns true if OptBatchUserThumbnailsReqSize was set.
|
||||
func (o OptBatchUserThumbnailsReqSize) IsSet() bool { return o.Set }
|
||||
|
||||
// Reset unsets value.
|
||||
func (o *OptBatchUserThumbnailsReqSize) Reset() {
|
||||
var v BatchUserThumbnailsReqSize
|
||||
o.Value = v
|
||||
o.Set = false
|
||||
}
|
||||
|
||||
// SetTo sets value to v.
|
||||
func (o *OptBatchUserThumbnailsReqSize) SetTo(v BatchUserThumbnailsReqSize) {
|
||||
o.Set = true
|
||||
o.Value = v
|
||||
}
|
||||
|
||||
// Get returns value and boolean that denotes whether value was set.
|
||||
func (o OptBatchUserThumbnailsReqSize) Get() (v BatchUserThumbnailsReqSize, ok bool) {
|
||||
if !o.Set {
|
||||
return v, false
|
||||
}
|
||||
return o.Value, true
|
||||
}
|
||||
|
||||
// Or returns value if set, or given parameter if does not.
|
||||
func (o OptBatchUserThumbnailsReqSize) Or(d BatchUserThumbnailsReqSize) BatchUserThumbnailsReqSize {
|
||||
if v, ok := o.Get(); ok {
|
||||
return v
|
||||
}
|
||||
return d
|
||||
}
|
||||
|
||||
// NewOptBatchUsernamesOKUsernames returns new OptBatchUsernamesOKUsernames with value set to v.
|
||||
func NewOptBatchUsernamesOKUsernames(v BatchUsernamesOKUsernames) OptBatchUsernamesOKUsernames {
|
||||
return OptBatchUsernamesOKUsernames{
|
||||
Value: v,
|
||||
Set: true,
|
||||
}
|
||||
}
|
||||
|
||||
// OptBatchUsernamesOKUsernames is optional BatchUsernamesOKUsernames.
|
||||
type OptBatchUsernamesOKUsernames struct {
|
||||
Value BatchUsernamesOKUsernames
|
||||
Set bool
|
||||
}
|
||||
|
||||
// IsSet returns true if OptBatchUsernamesOKUsernames was set.
|
||||
func (o OptBatchUsernamesOKUsernames) IsSet() bool { return o.Set }
|
||||
|
||||
// Reset unsets value.
|
||||
func (o *OptBatchUsernamesOKUsernames) Reset() {
|
||||
var v BatchUsernamesOKUsernames
|
||||
o.Value = v
|
||||
o.Set = false
|
||||
}
|
||||
|
||||
// SetTo sets value to v.
|
||||
func (o *OptBatchUsernamesOKUsernames) SetTo(v BatchUsernamesOKUsernames) {
|
||||
o.Set = true
|
||||
o.Value = v
|
||||
}
|
||||
|
||||
// Get returns value and boolean that denotes whether value was set.
|
||||
func (o OptBatchUsernamesOKUsernames) Get() (v BatchUsernamesOKUsernames, ok bool) {
|
||||
if !o.Set {
|
||||
return v, false
|
||||
}
|
||||
return o.Value, true
|
||||
}
|
||||
|
||||
// Or returns value if set, or given parameter if does not.
|
||||
func (o OptBatchUsernamesOKUsernames) Or(d BatchUsernamesOKUsernames) BatchUsernamesOKUsernames {
|
||||
if v, ok := o.Get(); ok {
|
||||
return v
|
||||
}
|
||||
return d
|
||||
}
|
||||
|
||||
// NewOptGetAssetThumbnailSize returns new OptGetAssetThumbnailSize with value set to v.
|
||||
func NewOptGetAssetThumbnailSize(v GetAssetThumbnailSize) OptGetAssetThumbnailSize {
|
||||
return OptGetAssetThumbnailSize{
|
||||
Value: v,
|
||||
Set: true,
|
||||
}
|
||||
}
|
||||
|
||||
// OptGetAssetThumbnailSize is optional GetAssetThumbnailSize.
|
||||
type OptGetAssetThumbnailSize struct {
|
||||
Value GetAssetThumbnailSize
|
||||
Set bool
|
||||
}
|
||||
|
||||
// IsSet returns true if OptGetAssetThumbnailSize was set.
|
||||
func (o OptGetAssetThumbnailSize) IsSet() bool { return o.Set }
|
||||
|
||||
// Reset unsets value.
|
||||
func (o *OptGetAssetThumbnailSize) Reset() {
|
||||
var v GetAssetThumbnailSize
|
||||
o.Value = v
|
||||
o.Set = false
|
||||
}
|
||||
|
||||
// SetTo sets value to v.
|
||||
func (o *OptGetAssetThumbnailSize) SetTo(v GetAssetThumbnailSize) {
|
||||
o.Set = true
|
||||
o.Value = v
|
||||
}
|
||||
|
||||
// Get returns value and boolean that denotes whether value was set.
|
||||
func (o OptGetAssetThumbnailSize) Get() (v GetAssetThumbnailSize, ok bool) {
|
||||
if !o.Set {
|
||||
return v, false
|
||||
}
|
||||
return o.Value, true
|
||||
}
|
||||
|
||||
// Or returns value if set, or given parameter if does not.
|
||||
func (o OptGetAssetThumbnailSize) Or(d GetAssetThumbnailSize) GetAssetThumbnailSize {
|
||||
if v, ok := o.Get(); ok {
|
||||
return v
|
||||
}
|
||||
return d
|
||||
}
|
||||
|
||||
// NewOptGetUserThumbnailSize returns new OptGetUserThumbnailSize with value set to v.
|
||||
func NewOptGetUserThumbnailSize(v GetUserThumbnailSize) OptGetUserThumbnailSize {
|
||||
return OptGetUserThumbnailSize{
|
||||
Value: v,
|
||||
Set: true,
|
||||
}
|
||||
}
|
||||
|
||||
// OptGetUserThumbnailSize is optional GetUserThumbnailSize.
|
||||
type OptGetUserThumbnailSize struct {
|
||||
Value GetUserThumbnailSize
|
||||
Set bool
|
||||
}
|
||||
|
||||
// IsSet returns true if OptGetUserThumbnailSize was set.
|
||||
func (o OptGetUserThumbnailSize) IsSet() bool { return o.Set }
|
||||
|
||||
// Reset unsets value.
|
||||
func (o *OptGetUserThumbnailSize) Reset() {
|
||||
var v GetUserThumbnailSize
|
||||
o.Value = v
|
||||
o.Set = false
|
||||
}
|
||||
|
||||
// SetTo sets value to v.
|
||||
func (o *OptGetUserThumbnailSize) SetTo(v GetUserThumbnailSize) {
|
||||
o.Set = true
|
||||
o.Value = v
|
||||
}
|
||||
|
||||
// Get returns value and boolean that denotes whether value was set.
|
||||
func (o OptGetUserThumbnailSize) Get() (v GetUserThumbnailSize, ok bool) {
|
||||
if !o.Set {
|
||||
return v, false
|
||||
}
|
||||
return o.Value, true
|
||||
}
|
||||
|
||||
// Or returns value if set, or given parameter if does not.
|
||||
func (o OptGetUserThumbnailSize) Or(d GetUserThumbnailSize) GetUserThumbnailSize {
|
||||
if v, ok := o.Get(); ok {
|
||||
return v
|
||||
}
|
||||
return d
|
||||
}
|
||||
|
||||
// NewOptInt32 returns new OptInt32 with value set to v.
|
||||
func NewOptInt32(v int32) OptInt32 {
|
||||
return OptInt32{
|
||||
@@ -913,9 +1638,6 @@ func (s *ReleaseInfo) SetDate(val time.Time) {
|
||||
s.Date = val
|
||||
}
|
||||
|
||||
// ReleaseSubmissionsCreated is response for ReleaseSubmissions operation.
|
||||
type ReleaseSubmissionsCreated struct{}
|
||||
|
||||
// Ref: #/components/schemas/Roles
|
||||
type Roles struct {
|
||||
Roles int32 `json:"Roles"`
|
||||
@@ -1277,6 +1999,83 @@ type SetMapfixCompletedNoContent struct{}
|
||||
// SetSubmissionCompletedNoContent is response for SetSubmissionCompleted operation.
|
||||
type SetSubmissionCompletedNoContent struct{}
|
||||
|
||||
// Aggregate statistics for submissions and mapfixes.
|
||||
// Ref: #/components/schemas/Stats
|
||||
type Stats struct {
|
||||
// Total number of submissions.
|
||||
TotalSubmissions int64 `json:"TotalSubmissions"`
|
||||
// Total number of mapfixes.
|
||||
TotalMapfixes int64 `json:"TotalMapfixes"`
|
||||
// Number of released submissions.
|
||||
ReleasedSubmissions int64 `json:"ReleasedSubmissions"`
|
||||
// Number of released mapfixes.
|
||||
ReleasedMapfixes int64 `json:"ReleasedMapfixes"`
|
||||
// Number of submissions under review.
|
||||
SubmittedSubmissions int64 `json:"SubmittedSubmissions"`
|
||||
// Number of mapfixes under review.
|
||||
SubmittedMapfixes int64 `json:"SubmittedMapfixes"`
|
||||
}
|
||||
|
||||
// GetTotalSubmissions returns the value of TotalSubmissions.
|
||||
func (s *Stats) GetTotalSubmissions() int64 {
|
||||
return s.TotalSubmissions
|
||||
}
|
||||
|
||||
// GetTotalMapfixes returns the value of TotalMapfixes.
|
||||
func (s *Stats) GetTotalMapfixes() int64 {
|
||||
return s.TotalMapfixes
|
||||
}
|
||||
|
||||
// GetReleasedSubmissions returns the value of ReleasedSubmissions.
|
||||
func (s *Stats) GetReleasedSubmissions() int64 {
|
||||
return s.ReleasedSubmissions
|
||||
}
|
||||
|
||||
// GetReleasedMapfixes returns the value of ReleasedMapfixes.
|
||||
func (s *Stats) GetReleasedMapfixes() int64 {
|
||||
return s.ReleasedMapfixes
|
||||
}
|
||||
|
||||
// GetSubmittedSubmissions returns the value of SubmittedSubmissions.
|
||||
func (s *Stats) GetSubmittedSubmissions() int64 {
|
||||
return s.SubmittedSubmissions
|
||||
}
|
||||
|
||||
// GetSubmittedMapfixes returns the value of SubmittedMapfixes.
|
||||
func (s *Stats) GetSubmittedMapfixes() int64 {
|
||||
return s.SubmittedMapfixes
|
||||
}
|
||||
|
||||
// SetTotalSubmissions sets the value of TotalSubmissions.
|
||||
func (s *Stats) SetTotalSubmissions(val int64) {
|
||||
s.TotalSubmissions = val
|
||||
}
|
||||
|
||||
// SetTotalMapfixes sets the value of TotalMapfixes.
|
||||
func (s *Stats) SetTotalMapfixes(val int64) {
|
||||
s.TotalMapfixes = val
|
||||
}
|
||||
|
||||
// SetReleasedSubmissions sets the value of ReleasedSubmissions.
|
||||
func (s *Stats) SetReleasedSubmissions(val int64) {
|
||||
s.ReleasedSubmissions = val
|
||||
}
|
||||
|
||||
// SetReleasedMapfixes sets the value of ReleasedMapfixes.
|
||||
func (s *Stats) SetReleasedMapfixes(val int64) {
|
||||
s.ReleasedMapfixes = val
|
||||
}
|
||||
|
||||
// SetSubmittedSubmissions sets the value of SubmittedSubmissions.
|
||||
func (s *Stats) SetSubmittedSubmissions(val int64) {
|
||||
s.SubmittedSubmissions = val
|
||||
}
|
||||
|
||||
// SetSubmittedMapfixes sets the value of SubmittedMapfixes.
|
||||
func (s *Stats) SetSubmittedMapfixes(val int64) {
|
||||
s.SubmittedMapfixes = val
|
||||
}
|
||||
|
||||
// Ref: #/components/schemas/Submission
|
||||
type Submission struct {
|
||||
ID int64 `json:"ID"`
|
||||
@@ -1509,6 +2308,23 @@ func (s *Submissions) SetSubmissions(val []Submission) {
|
||||
s.Submissions = val
|
||||
}
|
||||
|
||||
// UpdateMapfixDescriptionNoContent is response for UpdateMapfixDescription operation.
|
||||
type UpdateMapfixDescriptionNoContent struct{}
|
||||
|
||||
type UpdateMapfixDescriptionReq struct {
|
||||
Data io.Reader
|
||||
}
|
||||
|
||||
// Read reads data from the Data reader.
|
||||
//
|
||||
// Kept to satisfy the io.Reader interface.
|
||||
func (s UpdateMapfixDescriptionReq) Read(p []byte) (n int, err error) {
|
||||
if s.Data == nil {
|
||||
return 0, io.EOF
|
||||
}
|
||||
return s.Data.Read(p)
|
||||
}
|
||||
|
||||
// UpdateMapfixModelNoContent is response for UpdateMapfixModel operation.
|
||||
type UpdateMapfixModelNoContent struct{}
|
||||
|
||||
|
||||
@@ -8,7 +8,6 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/go-faster/errors"
|
||||
|
||||
"github.com/ogen-go/ogen/ogenerrors"
|
||||
)
|
||||
|
||||
@@ -40,10 +39,12 @@ var operationRolesCookieAuth = map[string][]string{
|
||||
ActionMapfixResetSubmittingOperation: []string{},
|
||||
ActionMapfixRetryValidateOperation: []string{},
|
||||
ActionMapfixRevokeOperation: []string{},
|
||||
ActionMapfixTriggerReleaseOperation: []string{},
|
||||
ActionMapfixTriggerSubmitOperation: []string{},
|
||||
ActionMapfixTriggerSubmitUncheckedOperation: []string{},
|
||||
ActionMapfixTriggerUploadOperation: []string{},
|
||||
ActionMapfixTriggerValidateOperation: []string{},
|
||||
ActionMapfixUploadedOperation: []string{},
|
||||
ActionMapfixValidatedOperation: []string{},
|
||||
ActionSubmissionAcceptedOperation: []string{},
|
||||
ActionSubmissionRejectOperation: []string{},
|
||||
@@ -73,6 +74,7 @@ var operationRolesCookieAuth = map[string][]string{
|
||||
SessionValidateOperation: []string{},
|
||||
SetMapfixCompletedOperation: []string{},
|
||||
SetSubmissionCompletedOperation: []string{},
|
||||
UpdateMapfixDescriptionOperation: []string{},
|
||||
UpdateMapfixModelOperation: []string{},
|
||||
UpdateScriptOperation: []string{},
|
||||
UpdateScriptPolicyOperation: []string{},
|
||||
|
||||
@@ -45,6 +45,12 @@ type Handler interface {
|
||||
//
|
||||
// POST /mapfixes/{MapfixID}/status/revoke
|
||||
ActionMapfixRevoke(ctx context.Context, params ActionMapfixRevokeParams) error
|
||||
// ActionMapfixTriggerRelease implements actionMapfixTriggerRelease operation.
|
||||
//
|
||||
// Role MapfixUpload changes status from Uploaded -> Releasing.
|
||||
//
|
||||
// POST /mapfixes/{MapfixID}/status/trigger-release
|
||||
ActionMapfixTriggerRelease(ctx context.Context, params ActionMapfixTriggerReleaseParams) error
|
||||
// ActionMapfixTriggerSubmit implements actionMapfixTriggerSubmit operation.
|
||||
//
|
||||
// Role Submitter changes status from UnderConstruction|ChangesRequested -> Submitting.
|
||||
@@ -59,7 +65,7 @@ type Handler interface {
|
||||
ActionMapfixTriggerSubmitUnchecked(ctx context.Context, params ActionMapfixTriggerSubmitUncheckedParams) error
|
||||
// ActionMapfixTriggerUpload implements actionMapfixTriggerUpload operation.
|
||||
//
|
||||
// Role Admin changes status from Validated -> Uploading.
|
||||
// Role MapfixUpload changes status from Validated -> Uploading.
|
||||
//
|
||||
// POST /mapfixes/{MapfixID}/status/trigger-upload
|
||||
ActionMapfixTriggerUpload(ctx context.Context, params ActionMapfixTriggerUploadParams) error
|
||||
@@ -69,9 +75,15 @@ type Handler interface {
|
||||
//
|
||||
// POST /mapfixes/{MapfixID}/status/trigger-validate
|
||||
ActionMapfixTriggerValidate(ctx context.Context, params ActionMapfixTriggerValidateParams) error
|
||||
// ActionMapfixUploaded implements actionMapfixUploaded operation.
|
||||
//
|
||||
// Role MapfixUpload manually resets releasing softlock and changes status from Releasing -> Uploaded.
|
||||
//
|
||||
// POST /mapfixes/{MapfixID}/status/reset-releasing
|
||||
ActionMapfixUploaded(ctx context.Context, params ActionMapfixUploadedParams) error
|
||||
// ActionMapfixValidated implements actionMapfixValidated operation.
|
||||
//
|
||||
// Role Admin manually resets uploading softlock and changes status from Uploading -> Validated.
|
||||
// Role MapfixUpload manually resets uploading softlock and changes status from Uploading -> Validated.
|
||||
//
|
||||
// POST /mapfixes/{MapfixID}/status/reset-uploading
|
||||
ActionMapfixValidated(ctx context.Context, params ActionMapfixValidatedParams) error
|
||||
@@ -126,7 +138,7 @@ type Handler interface {
|
||||
ActionSubmissionTriggerSubmitUnchecked(ctx context.Context, params ActionSubmissionTriggerSubmitUncheckedParams) error
|
||||
// ActionSubmissionTriggerUpload implements actionSubmissionTriggerUpload operation.
|
||||
//
|
||||
// Role Admin changes status from Validated -> Uploading.
|
||||
// Role SubmissionUpload changes status from Validated -> Uploading.
|
||||
//
|
||||
// POST /submissions/{SubmissionID}/status/trigger-upload
|
||||
ActionSubmissionTriggerUpload(ctx context.Context, params ActionSubmissionTriggerUploadParams) error
|
||||
@@ -138,10 +150,29 @@ type Handler interface {
|
||||
ActionSubmissionTriggerValidate(ctx context.Context, params ActionSubmissionTriggerValidateParams) error
|
||||
// ActionSubmissionValidated implements actionSubmissionValidated operation.
|
||||
//
|
||||
// Role Admin manually resets uploading softlock and changes status from Uploading -> Validated.
|
||||
// Role SubmissionUpload manually resets uploading softlock and changes status from Uploading ->
|
||||
// Validated.
|
||||
//
|
||||
// POST /submissions/{SubmissionID}/status/reset-uploading
|
||||
ActionSubmissionValidated(ctx context.Context, params ActionSubmissionValidatedParams) error
|
||||
// BatchAssetThumbnails implements batchAssetThumbnails operation.
|
||||
//
|
||||
// Batch fetch asset thumbnails.
|
||||
//
|
||||
// POST /thumbnails/assets
|
||||
BatchAssetThumbnails(ctx context.Context, req *BatchAssetThumbnailsReq) (*BatchAssetThumbnailsOK, error)
|
||||
// BatchUserThumbnails implements batchUserThumbnails operation.
|
||||
//
|
||||
// Batch fetch user avatar thumbnails.
|
||||
//
|
||||
// POST /thumbnails/users
|
||||
BatchUserThumbnails(ctx context.Context, req *BatchUserThumbnailsReq) (*BatchUserThumbnailsOK, error)
|
||||
// BatchUsernames implements batchUsernames operation.
|
||||
//
|
||||
// Batch fetch usernames.
|
||||
//
|
||||
// POST /usernames
|
||||
BatchUsernames(ctx context.Context, req *BatchUsernamesReq) (*BatchUsernamesOK, error)
|
||||
// CreateMapfix implements createMapfix operation.
|
||||
//
|
||||
// Trigger the validator to create a mapfix.
|
||||
@@ -202,6 +233,12 @@ type Handler interface {
|
||||
//
|
||||
// GET /maps/{MapID}/download
|
||||
DownloadMapAsset(ctx context.Context, params DownloadMapAssetParams) (DownloadMapAssetOK, error)
|
||||
// GetAssetThumbnail implements getAssetThumbnail operation.
|
||||
//
|
||||
// Get single asset thumbnail.
|
||||
//
|
||||
// GET /thumbnails/asset/{AssetID}
|
||||
GetAssetThumbnail(ctx context.Context, params GetAssetThumbnailParams) (*GetAssetThumbnailFound, error)
|
||||
// GetMap implements getMap operation.
|
||||
//
|
||||
// Retrieve map with ID.
|
||||
@@ -232,12 +269,24 @@ type Handler interface {
|
||||
//
|
||||
// GET /script-policy/{ScriptPolicyID}
|
||||
GetScriptPolicy(ctx context.Context, params GetScriptPolicyParams) (*ScriptPolicy, error)
|
||||
// GetStats implements getStats operation.
|
||||
//
|
||||
// Get aggregate statistics.
|
||||
//
|
||||
// GET /stats
|
||||
GetStats(ctx context.Context) (*Stats, error)
|
||||
// GetSubmission implements getSubmission operation.
|
||||
//
|
||||
// Retrieve map with ID.
|
||||
//
|
||||
// GET /submissions/{SubmissionID}
|
||||
GetSubmission(ctx context.Context, params GetSubmissionParams) (*Submission, error)
|
||||
// GetUserThumbnail implements getUserThumbnail operation.
|
||||
//
|
||||
// Get single user avatar thumbnail.
|
||||
//
|
||||
// GET /thumbnails/user/{UserID}
|
||||
GetUserThumbnail(ctx context.Context, params GetUserThumbnailParams) (*GetUserThumbnailFound, error)
|
||||
// ListMapfixAuditEvents implements listMapfixAuditEvents operation.
|
||||
//
|
||||
// Retrieve a list of audit events.
|
||||
@@ -282,10 +331,10 @@ type Handler interface {
|
||||
ListSubmissions(ctx context.Context, params ListSubmissionsParams) (*Submissions, error)
|
||||
// ReleaseSubmissions implements releaseSubmissions operation.
|
||||
//
|
||||
// Release a set of uploaded maps.
|
||||
// Release a set of uploaded maps. Role SubmissionRelease.
|
||||
//
|
||||
// POST /release-submissions
|
||||
ReleaseSubmissions(ctx context.Context, req []ReleaseInfo) error
|
||||
ReleaseSubmissions(ctx context.Context, req []ReleaseInfo) (*OperationID, error)
|
||||
// SessionRoles implements sessionRoles operation.
|
||||
//
|
||||
// Get list of roles for the current session.
|
||||
@@ -316,6 +365,12 @@ type Handler interface {
|
||||
//
|
||||
// POST /submissions/{SubmissionID}/completed
|
||||
SetSubmissionCompleted(ctx context.Context, params SetSubmissionCompletedParams) error
|
||||
// UpdateMapfixDescription implements updateMapfixDescription operation.
|
||||
//
|
||||
// Update description (submitter only).
|
||||
//
|
||||
// PATCH /mapfixes/{MapfixID}/description
|
||||
UpdateMapfixDescription(ctx context.Context, req UpdateMapfixDescriptionReq, params UpdateMapfixDescriptionParams) error
|
||||
// UpdateMapfixModel implements updateMapfixModel operation.
|
||||
//
|
||||
// Update model following role restrictions.
|
||||
|
||||
@@ -68,6 +68,15 @@ func (UnimplementedHandler) ActionMapfixRevoke(ctx context.Context, params Actio
|
||||
return ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// ActionMapfixTriggerRelease implements actionMapfixTriggerRelease operation.
|
||||
//
|
||||
// Role MapfixUpload changes status from Uploaded -> Releasing.
|
||||
//
|
||||
// POST /mapfixes/{MapfixID}/status/trigger-release
|
||||
func (UnimplementedHandler) ActionMapfixTriggerRelease(ctx context.Context, params ActionMapfixTriggerReleaseParams) error {
|
||||
return ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// ActionMapfixTriggerSubmit implements actionMapfixTriggerSubmit operation.
|
||||
//
|
||||
// Role Submitter changes status from UnderConstruction|ChangesRequested -> Submitting.
|
||||
@@ -88,7 +97,7 @@ func (UnimplementedHandler) ActionMapfixTriggerSubmitUnchecked(ctx context.Conte
|
||||
|
||||
// ActionMapfixTriggerUpload implements actionMapfixTriggerUpload operation.
|
||||
//
|
||||
// Role Admin changes status from Validated -> Uploading.
|
||||
// Role MapfixUpload changes status from Validated -> Uploading.
|
||||
//
|
||||
// POST /mapfixes/{MapfixID}/status/trigger-upload
|
||||
func (UnimplementedHandler) ActionMapfixTriggerUpload(ctx context.Context, params ActionMapfixTriggerUploadParams) error {
|
||||
@@ -104,9 +113,18 @@ func (UnimplementedHandler) ActionMapfixTriggerValidate(ctx context.Context, par
|
||||
return ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// ActionMapfixUploaded implements actionMapfixUploaded operation.
|
||||
//
|
||||
// Role MapfixUpload manually resets releasing softlock and changes status from Releasing -> Uploaded.
|
||||
//
|
||||
// POST /mapfixes/{MapfixID}/status/reset-releasing
|
||||
func (UnimplementedHandler) ActionMapfixUploaded(ctx context.Context, params ActionMapfixUploadedParams) error {
|
||||
return ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// ActionMapfixValidated implements actionMapfixValidated operation.
|
||||
//
|
||||
// Role Admin manually resets uploading softlock and changes status from Uploading -> Validated.
|
||||
// Role MapfixUpload manually resets uploading softlock and changes status from Uploading -> Validated.
|
||||
//
|
||||
// POST /mapfixes/{MapfixID}/status/reset-uploading
|
||||
func (UnimplementedHandler) ActionMapfixValidated(ctx context.Context, params ActionMapfixValidatedParams) error {
|
||||
@@ -188,7 +206,7 @@ func (UnimplementedHandler) ActionSubmissionTriggerSubmitUnchecked(ctx context.C
|
||||
|
||||
// ActionSubmissionTriggerUpload implements actionSubmissionTriggerUpload operation.
|
||||
//
|
||||
// Role Admin changes status from Validated -> Uploading.
|
||||
// Role SubmissionUpload changes status from Validated -> Uploading.
|
||||
//
|
||||
// POST /submissions/{SubmissionID}/status/trigger-upload
|
||||
func (UnimplementedHandler) ActionSubmissionTriggerUpload(ctx context.Context, params ActionSubmissionTriggerUploadParams) error {
|
||||
@@ -206,13 +224,41 @@ func (UnimplementedHandler) ActionSubmissionTriggerValidate(ctx context.Context,
|
||||
|
||||
// ActionSubmissionValidated implements actionSubmissionValidated operation.
|
||||
//
|
||||
// Role Admin manually resets uploading softlock and changes status from Uploading -> Validated.
|
||||
// Role SubmissionUpload manually resets uploading softlock and changes status from Uploading ->
|
||||
// Validated.
|
||||
//
|
||||
// POST /submissions/{SubmissionID}/status/reset-uploading
|
||||
func (UnimplementedHandler) ActionSubmissionValidated(ctx context.Context, params ActionSubmissionValidatedParams) error {
|
||||
return ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// BatchAssetThumbnails implements batchAssetThumbnails operation.
|
||||
//
|
||||
// Batch fetch asset thumbnails.
|
||||
//
|
||||
// POST /thumbnails/assets
|
||||
func (UnimplementedHandler) BatchAssetThumbnails(ctx context.Context, req *BatchAssetThumbnailsReq) (r *BatchAssetThumbnailsOK, _ error) {
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// BatchUserThumbnails implements batchUserThumbnails operation.
|
||||
//
|
||||
// Batch fetch user avatar thumbnails.
|
||||
//
|
||||
// POST /thumbnails/users
|
||||
func (UnimplementedHandler) BatchUserThumbnails(ctx context.Context, req *BatchUserThumbnailsReq) (r *BatchUserThumbnailsOK, _ error) {
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// BatchUsernames implements batchUsernames operation.
|
||||
//
|
||||
// Batch fetch usernames.
|
||||
//
|
||||
// POST /usernames
|
||||
func (UnimplementedHandler) BatchUsernames(ctx context.Context, req *BatchUsernamesReq) (r *BatchUsernamesOK, _ error) {
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// CreateMapfix implements createMapfix operation.
|
||||
//
|
||||
// Trigger the validator to create a mapfix.
|
||||
@@ -303,6 +349,15 @@ func (UnimplementedHandler) DownloadMapAsset(ctx context.Context, params Downloa
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// GetAssetThumbnail implements getAssetThumbnail operation.
|
||||
//
|
||||
// Get single asset thumbnail.
|
||||
//
|
||||
// GET /thumbnails/asset/{AssetID}
|
||||
func (UnimplementedHandler) GetAssetThumbnail(ctx context.Context, params GetAssetThumbnailParams) (r *GetAssetThumbnailFound, _ error) {
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// GetMap implements getMap operation.
|
||||
//
|
||||
// Retrieve map with ID.
|
||||
@@ -348,6 +403,15 @@ func (UnimplementedHandler) GetScriptPolicy(ctx context.Context, params GetScrip
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// GetStats implements getStats operation.
|
||||
//
|
||||
// Get aggregate statistics.
|
||||
//
|
||||
// GET /stats
|
||||
func (UnimplementedHandler) GetStats(ctx context.Context) (r *Stats, _ error) {
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// GetSubmission implements getSubmission operation.
|
||||
//
|
||||
// Retrieve map with ID.
|
||||
@@ -357,6 +421,15 @@ func (UnimplementedHandler) GetSubmission(ctx context.Context, params GetSubmiss
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// GetUserThumbnail implements getUserThumbnail operation.
|
||||
//
|
||||
// Get single user avatar thumbnail.
|
||||
//
|
||||
// GET /thumbnails/user/{UserID}
|
||||
func (UnimplementedHandler) GetUserThumbnail(ctx context.Context, params GetUserThumbnailParams) (r *GetUserThumbnailFound, _ error) {
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// ListMapfixAuditEvents implements listMapfixAuditEvents operation.
|
||||
//
|
||||
// Retrieve a list of audit events.
|
||||
@@ -422,11 +495,11 @@ func (UnimplementedHandler) ListSubmissions(ctx context.Context, params ListSubm
|
||||
|
||||
// ReleaseSubmissions implements releaseSubmissions operation.
|
||||
//
|
||||
// Release a set of uploaded maps.
|
||||
// Release a set of uploaded maps. Role SubmissionRelease.
|
||||
//
|
||||
// POST /release-submissions
|
||||
func (UnimplementedHandler) ReleaseSubmissions(ctx context.Context, req []ReleaseInfo) error {
|
||||
return ht.ErrNotImplemented
|
||||
func (UnimplementedHandler) ReleaseSubmissions(ctx context.Context, req []ReleaseInfo) (r *OperationID, _ error) {
|
||||
return r, ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// SessionRoles implements sessionRoles operation.
|
||||
@@ -474,6 +547,15 @@ func (UnimplementedHandler) SetSubmissionCompleted(ctx context.Context, params S
|
||||
return ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// UpdateMapfixDescription implements updateMapfixDescription operation.
|
||||
//
|
||||
// Update description (submitter only).
|
||||
//
|
||||
// PATCH /mapfixes/{MapfixID}/description
|
||||
func (UnimplementedHandler) UpdateMapfixDescription(ctx context.Context, req UpdateMapfixDescriptionReq, params UpdateMapfixDescriptionParams) error {
|
||||
return ht.ErrNotImplemented
|
||||
}
|
||||
|
||||
// UpdateMapfixModel implements updateMapfixModel operation.
|
||||
//
|
||||
// Update model following role restrictions.
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -8,6 +8,8 @@ import (
|
||||
"git.itzana.me/strafesnet/go-grpc/auth"
|
||||
"git.itzana.me/strafesnet/go-grpc/maps"
|
||||
"git.itzana.me/strafesnet/go-grpc/maps_extended"
|
||||
"git.itzana.me/strafesnet/go-grpc/mapfixes"
|
||||
"git.itzana.me/strafesnet/go-grpc/submissions"
|
||||
"git.itzana.me/strafesnet/go-grpc/users"
|
||||
"git.itzana.me/strafesnet/go-grpc/validator"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/api"
|
||||
@@ -18,6 +20,7 @@ import (
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/validator_controller"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/web_api"
|
||||
"github.com/nats-io/nats.go"
|
||||
"github.com/redis/go-redis/v9"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/urfave/cli/v2"
|
||||
"google.golang.org/grpc"
|
||||
@@ -102,6 +105,24 @@ func NewServeCommand() *cli.Command {
|
||||
EnvVars: []string{"RBX_API_KEY"},
|
||||
Required: true,
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "redis-host",
|
||||
Usage: "Host of Redis cache",
|
||||
EnvVars: []string{"REDIS_HOST"},
|
||||
Value: "localhost:6379",
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "redis-password",
|
||||
Usage: "Password for Redis",
|
||||
EnvVars: []string{"REDIS_PASSWORD"},
|
||||
Value: "",
|
||||
},
|
||||
&cli.IntFlag{
|
||||
Name: "redis-db",
|
||||
Usage: "Redis database number",
|
||||
EnvVars: []string{"REDIS_DB"},
|
||||
Value: 0,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -129,6 +150,24 @@ func serve(ctx *cli.Context) error {
|
||||
log.WithError(err).Fatal("failed to add stream")
|
||||
}
|
||||
|
||||
// Initialize Redis client
|
||||
redisClient := redis.NewClient(&redis.Options{
|
||||
Addr: ctx.String("redis-host"),
|
||||
Password: ctx.String("redis-password"),
|
||||
DB: ctx.Int("redis-db"),
|
||||
})
|
||||
|
||||
// Test Redis connection
|
||||
if err := redisClient.Ping(ctx.Context).Err(); err != nil {
|
||||
log.WithError(err).Warn("failed to connect to Redis - thumbnails will not be cached")
|
||||
}
|
||||
|
||||
// Initialize Roblox client
|
||||
robloxClient := &roblox.Client{
|
||||
HttpClient: http.DefaultClient,
|
||||
ApiKey: ctx.String("rbx-api-key"),
|
||||
}
|
||||
|
||||
// connect to main game database
|
||||
conn, err := grpc.Dial(ctx.String("data-rpc-host"), grpc.WithTransportCredentials(insecure.NewCredentials()))
|
||||
if err != nil {
|
||||
@@ -139,13 +178,15 @@ func serve(ctx *cli.Context) error {
|
||||
js,
|
||||
maps.NewMapsServiceClient(conn),
|
||||
users.NewUsersServiceClient(conn),
|
||||
robloxClient,
|
||||
redisClient,
|
||||
)
|
||||
|
||||
svc_external := web_api.NewService(
|
||||
&svc_inner,
|
||||
roblox.Client{
|
||||
HttpClient: http.DefaultClient,
|
||||
ApiKey: ctx.String("rbx-api-key"),
|
||||
ApiKey: ctx.String("rbx-api-key"),
|
||||
},
|
||||
)
|
||||
|
||||
@@ -165,7 +206,11 @@ func serve(ctx *cli.Context) error {
|
||||
grpcServer := grpc.NewServer()
|
||||
|
||||
maps_controller := controller.NewMapsController(&svc_inner)
|
||||
mapfixes_controller := controller.NewMapfixesController(&svc_inner)
|
||||
submissions_controller := controller.NewSubmissionsController(&svc_inner)
|
||||
maps_extended.RegisterMapsServiceServer(grpcServer,&maps_controller)
|
||||
mapfixes.RegisterMapfixesServiceServer(grpcServer,&mapfixes_controller)
|
||||
submissions.RegisterSubmissionsServiceServer(grpcServer,&submissions_controller)
|
||||
|
||||
mapfix_controller := validator_controller.NewMapfixesController(&svc_inner)
|
||||
operation_controller := validator_controller.NewOperationsController(&svc_inner)
|
||||
|
||||
149
pkg/controller/mapfixes.go
Normal file
149
pkg/controller/mapfixes.go
Normal file
@@ -0,0 +1,149 @@
|
||||
package controller
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"git.itzana.me/strafesnet/go-grpc/mapfixes"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/model"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/service"
|
||||
)
|
||||
|
||||
type Mapfixes struct {
|
||||
*mapfixes.UnimplementedMapfixesServiceServer
|
||||
inner *service.Service
|
||||
}
|
||||
|
||||
func NewMapfixesController(
|
||||
inner *service.Service,
|
||||
) Mapfixes {
|
||||
return Mapfixes{
|
||||
inner: inner,
|
||||
}
|
||||
}
|
||||
|
||||
func (svc *Mapfixes) Get(ctx context.Context, request *mapfixes.MapfixId) (*mapfixes.MapfixResponse, error) {
|
||||
item, err := svc.inner.GetMapfix(ctx, request.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var validated_asset_id *uint64
|
||||
if item.ValidatedAssetID != 0 {
|
||||
validated_asset_id = &item.ValidatedAssetID
|
||||
}
|
||||
var validated_asset_version *uint64
|
||||
if item.ValidatedAssetVersion != 0 {
|
||||
validated_asset_version = &item.ValidatedAssetVersion
|
||||
}
|
||||
return &mapfixes.MapfixResponse{
|
||||
ID: item.ID,
|
||||
DisplayName: item.DisplayName,
|
||||
Creator: item.Creator,
|
||||
GameID: uint32(item.GameID),
|
||||
CreatedAt: item.CreatedAt.Unix(),
|
||||
UpdatedAt: item.UpdatedAt.Unix(),
|
||||
Submitter: uint64(item.Submitter),
|
||||
AssetVersion: uint64(item.AssetVersion),
|
||||
AssetID: item.AssetID,
|
||||
ValidatedAssetID: validated_asset_id,
|
||||
ValidatedAssetVersion: validated_asset_version,
|
||||
TargetAssetID: item.TargetAssetID,
|
||||
StatusID: mapfixes.MapfixStatus(item.StatusID),
|
||||
}, nil
|
||||
}
|
||||
func (svc *Mapfixes) GetList(ctx context.Context, request *mapfixes.MapfixIdList) (*mapfixes.MapfixList, error) {
|
||||
items, err := svc.inner.GetMapfixList(ctx, request.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
resp := mapfixes.MapfixList{}
|
||||
resp.Mapfixes = make([]*mapfixes.MapfixResponse, len(items))
|
||||
for i, item := range items {
|
||||
var validated_asset_id *uint64
|
||||
if item.ValidatedAssetID != 0 {
|
||||
validated_asset_id = &item.ValidatedAssetID
|
||||
}
|
||||
var validated_asset_version *uint64
|
||||
if item.ValidatedAssetVersion != 0 {
|
||||
validated_asset_version = &item.ValidatedAssetVersion
|
||||
}
|
||||
resp.Mapfixes[i] = &mapfixes.MapfixResponse{
|
||||
ID: item.ID,
|
||||
DisplayName: item.DisplayName,
|
||||
Creator: item.Creator,
|
||||
GameID: uint32(item.GameID),
|
||||
CreatedAt: item.CreatedAt.Unix(),
|
||||
UpdatedAt: item.UpdatedAt.Unix(),
|
||||
Submitter: uint64(item.Submitter),
|
||||
AssetVersion: uint64(item.AssetVersion),
|
||||
AssetID: item.AssetID,
|
||||
ValidatedAssetID: validated_asset_id,
|
||||
ValidatedAssetVersion: validated_asset_version,
|
||||
TargetAssetID: item.TargetAssetID,
|
||||
StatusID: mapfixes.MapfixStatus(item.StatusID),
|
||||
}
|
||||
}
|
||||
|
||||
return &resp, nil
|
||||
}
|
||||
func (svc *Mapfixes) List(ctx context.Context, request *mapfixes.ListRequest) (*mapfixes.MapfixList, error) {
|
||||
if request.Page == nil {
|
||||
return nil, PageError
|
||||
}
|
||||
|
||||
filter := service.NewMapfixFilter()
|
||||
if request.Filter != nil {
|
||||
if request.Filter.DisplayName != nil {
|
||||
filter.SetDisplayName(*request.Filter.DisplayName)
|
||||
}
|
||||
if request.Filter.Creator != nil {
|
||||
filter.SetCreator(*request.Filter.Creator)
|
||||
}
|
||||
if request.Filter.GameID != nil {
|
||||
filter.SetGameID(*request.Filter.GameID)
|
||||
}
|
||||
if request.Filter.Submitter != nil {
|
||||
filter.SetSubmitter(*request.Filter.Submitter)
|
||||
}
|
||||
}
|
||||
|
||||
items, err := svc.inner.ListMapfixes(ctx, filter, model.Page{
|
||||
Number: int32(request.Page.Number),
|
||||
Size: int32(request.Page.Size),
|
||||
}, datastore.ListSortDateDescending)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
resp := mapfixes.MapfixList{}
|
||||
resp.Mapfixes = make([]*mapfixes.MapfixResponse, len(items))
|
||||
for i, item := range items {
|
||||
var validated_asset_id *uint64
|
||||
if item.ValidatedAssetID != 0 {
|
||||
validated_asset_id = &item.ValidatedAssetID
|
||||
}
|
||||
var validated_asset_version *uint64
|
||||
if item.ValidatedAssetVersion != 0 {
|
||||
validated_asset_version = &item.ValidatedAssetVersion
|
||||
}
|
||||
resp.Mapfixes[i] = &mapfixes.MapfixResponse{
|
||||
ID: item.ID,
|
||||
DisplayName: item.DisplayName,
|
||||
Creator: item.Creator,
|
||||
GameID: uint32(item.GameID),
|
||||
CreatedAt: item.CreatedAt.Unix(),
|
||||
UpdatedAt: item.UpdatedAt.Unix(),
|
||||
Submitter: uint64(item.Submitter),
|
||||
AssetVersion: uint64(item.AssetVersion),
|
||||
AssetID: item.AssetID,
|
||||
ValidatedAssetID: validated_asset_id,
|
||||
ValidatedAssetVersion: validated_asset_version,
|
||||
TargetAssetID: item.TargetAssetID,
|
||||
StatusID: mapfixes.MapfixStatus(item.StatusID),
|
||||
}
|
||||
}
|
||||
|
||||
return &resp, nil
|
||||
}
|
||||
161
pkg/controller/submissions.go
Normal file
161
pkg/controller/submissions.go
Normal file
@@ -0,0 +1,161 @@
|
||||
package controller
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"git.itzana.me/strafesnet/go-grpc/submissions"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/model"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/service"
|
||||
)
|
||||
|
||||
type Submissions struct {
|
||||
*submissions.UnimplementedSubmissionsServiceServer
|
||||
inner *service.Service
|
||||
}
|
||||
|
||||
func NewSubmissionsController(
|
||||
inner *service.Service,
|
||||
) Submissions {
|
||||
return Submissions{
|
||||
inner: inner,
|
||||
}
|
||||
}
|
||||
|
||||
func (svc *Submissions) Get(ctx context.Context, request *submissions.SubmissionId) (*submissions.SubmissionResponse, error) {
|
||||
item, err := svc.inner.GetSubmission(ctx, request.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var validated_asset_id *uint64
|
||||
if item.ValidatedAssetID != 0 {
|
||||
validated_asset_id = &item.ValidatedAssetID
|
||||
}
|
||||
var validated_asset_version *uint64
|
||||
if item.ValidatedAssetVersion != 0 {
|
||||
validated_asset_version = &item.ValidatedAssetVersion
|
||||
}
|
||||
var uploaded_asset_id *uint64
|
||||
if item.UploadedAssetID != 0 {
|
||||
uploaded_asset_id = &item.UploadedAssetID
|
||||
}
|
||||
return &submissions.SubmissionResponse{
|
||||
ID: item.ID,
|
||||
DisplayName: item.DisplayName,
|
||||
Creator: item.Creator,
|
||||
GameID: uint32(item.GameID),
|
||||
CreatedAt: item.CreatedAt.Unix(),
|
||||
UpdatedAt: item.UpdatedAt.Unix(),
|
||||
Submitter: uint64(item.Submitter),
|
||||
AssetVersion: uint64(item.AssetVersion),
|
||||
AssetID: item.AssetID,
|
||||
ValidatedAssetID: validated_asset_id,
|
||||
ValidatedAssetVersion: validated_asset_version,
|
||||
UploadedAssetID: uploaded_asset_id,
|
||||
StatusID: submissions.SubmissionStatus(item.StatusID),
|
||||
}, nil
|
||||
}
|
||||
func (svc *Submissions) GetList(ctx context.Context, request *submissions.SubmissionIdList) (*submissions.SubmissionList, error) {
|
||||
items, err := svc.inner.GetSubmissionList(ctx, request.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
resp := submissions.SubmissionList{}
|
||||
resp.Submissions = make([]*submissions.SubmissionResponse, len(items))
|
||||
for i, item := range items {
|
||||
var validated_asset_id *uint64
|
||||
if item.ValidatedAssetID != 0 {
|
||||
validated_asset_id = &item.ValidatedAssetID
|
||||
}
|
||||
var validated_asset_version *uint64
|
||||
if item.ValidatedAssetVersion != 0 {
|
||||
validated_asset_version = &item.ValidatedAssetVersion
|
||||
}
|
||||
var uploaded_asset_id *uint64
|
||||
if item.UploadedAssetID != 0 {
|
||||
uploaded_asset_id = &item.UploadedAssetID
|
||||
}
|
||||
resp.Submissions[i] = &submissions.SubmissionResponse{
|
||||
ID: item.ID,
|
||||
DisplayName: item.DisplayName,
|
||||
Creator: item.Creator,
|
||||
GameID: uint32(item.GameID),
|
||||
CreatedAt: item.CreatedAt.Unix(),
|
||||
UpdatedAt: item.UpdatedAt.Unix(),
|
||||
Submitter: uint64(item.Submitter),
|
||||
AssetVersion: uint64(item.AssetVersion),
|
||||
AssetID: item.AssetID,
|
||||
ValidatedAssetID: validated_asset_id,
|
||||
ValidatedAssetVersion: validated_asset_version,
|
||||
UploadedAssetID: uploaded_asset_id,
|
||||
StatusID: submissions.SubmissionStatus(item.StatusID),
|
||||
}
|
||||
}
|
||||
|
||||
return &resp, nil
|
||||
}
|
||||
func (svc *Submissions) List(ctx context.Context, request *submissions.ListRequest) (*submissions.SubmissionList, error) {
|
||||
if request.Page == nil {
|
||||
return nil, PageError
|
||||
}
|
||||
|
||||
filter := service.NewSubmissionFilter()
|
||||
if request.Filter != nil {
|
||||
if request.Filter.DisplayName != nil {
|
||||
filter.SetDisplayName(*request.Filter.DisplayName)
|
||||
}
|
||||
if request.Filter.Creator != nil {
|
||||
filter.SetCreator(*request.Filter.Creator)
|
||||
}
|
||||
if request.Filter.GameID != nil {
|
||||
filter.SetGameID(*request.Filter.GameID)
|
||||
}
|
||||
if request.Filter.Submitter != nil {
|
||||
filter.SetSubmitter(*request.Filter.Submitter)
|
||||
}
|
||||
}
|
||||
|
||||
items, err := svc.inner.ListSubmissions(ctx, filter, model.Page{
|
||||
Number: int32(request.Page.Number),
|
||||
Size: int32(request.Page.Size),
|
||||
}, datastore.ListSortDateDescending)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
resp := submissions.SubmissionList{}
|
||||
resp.Submissions = make([]*submissions.SubmissionResponse, len(items))
|
||||
for i, item := range items {
|
||||
var validated_asset_id *uint64
|
||||
if item.ValidatedAssetID != 0 {
|
||||
validated_asset_id = &item.ValidatedAssetID
|
||||
}
|
||||
var validated_asset_version *uint64
|
||||
if item.ValidatedAssetVersion != 0 {
|
||||
validated_asset_version = &item.ValidatedAssetVersion
|
||||
}
|
||||
var uploaded_asset_id *uint64
|
||||
if item.UploadedAssetID != 0 {
|
||||
uploaded_asset_id = &item.UploadedAssetID
|
||||
}
|
||||
resp.Submissions[i] = &submissions.SubmissionResponse{
|
||||
ID: item.ID,
|
||||
DisplayName: item.DisplayName,
|
||||
Creator: item.Creator,
|
||||
GameID: uint32(item.GameID),
|
||||
CreatedAt: item.CreatedAt.Unix(),
|
||||
UpdatedAt: item.UpdatedAt.Unix(),
|
||||
Submitter: uint64(item.Submitter),
|
||||
AssetVersion: uint64(item.AssetVersion),
|
||||
AssetID: item.AssetID,
|
||||
ValidatedAssetID: validated_asset_id,
|
||||
ValidatedAssetVersion: validated_asset_version,
|
||||
UploadedAssetID: uploaded_asset_id,
|
||||
StatusID: submissions.SubmissionStatus(item.StatusID),
|
||||
}
|
||||
}
|
||||
|
||||
return &resp, nil
|
||||
}
|
||||
@@ -18,10 +18,12 @@ const (
|
||||
MapfixStatusValidating MapfixStatus = 5
|
||||
MapfixStatusValidated MapfixStatus = 6
|
||||
MapfixStatusUploading MapfixStatus = 7
|
||||
MapfixStatusUploaded MapfixStatus = 8 // uploaded to the group, but pending release
|
||||
MapfixStatusReleasing MapfixStatus = 11
|
||||
|
||||
// Phase: Final MapfixStatus
|
||||
MapfixStatusUploaded MapfixStatus = 8 // uploaded to the group, but pending release
|
||||
MapfixStatusRejected MapfixStatus = 9
|
||||
MapfixStatusReleased MapfixStatus = 10
|
||||
)
|
||||
|
||||
type Mapfix struct {
|
||||
|
||||
@@ -65,3 +65,29 @@ type UploadMapfixRequest struct {
|
||||
ModelVersion uint64
|
||||
TargetAssetID uint64
|
||||
}
|
||||
|
||||
type ReleaseSubmissionRequest struct {
|
||||
// Release schedule
|
||||
SubmissionID int64
|
||||
ReleaseDate int64
|
||||
// Model download info
|
||||
ModelID uint64
|
||||
ModelVersion uint64
|
||||
// MapCreate
|
||||
UploadedAssetID uint64
|
||||
DisplayName string
|
||||
Creator string
|
||||
GameID uint32
|
||||
Submitter uint64
|
||||
}
|
||||
type BatchReleaseSubmissionsRequest struct {
|
||||
Submissions []ReleaseSubmissionRequest
|
||||
OperationID int32
|
||||
}
|
||||
|
||||
type ReleaseMapfixRequest struct {
|
||||
MapfixID int64
|
||||
ModelID uint64
|
||||
ModelVersion uint64
|
||||
TargetAssetID uint64
|
||||
}
|
||||
|
||||
@@ -17,7 +17,7 @@ type ScriptPolicy struct {
|
||||
// Hash of the source code that leads to this policy.
|
||||
// If this is a replacement mapping, the original source may not be pointed to by any policy.
|
||||
// The original source should still exist in the scripts table, which can be located by the same hash.
|
||||
FromScriptHash int64 // postgres does not support unsigned integers, so we have to pretend
|
||||
FromScriptHash int64 `gorm:"uniqueIndex"` // postgres does not support unsigned integers, so we have to pretend
|
||||
// The ID of the replacement source (ScriptPolicyReplace)
|
||||
// or verbatim source (ScriptPolicyAllowed)
|
||||
// or 0 (other)
|
||||
|
||||
@@ -26,7 +26,7 @@ func HashParse(hash string) (uint64, error){
|
||||
type Script struct {
|
||||
ID int64 `gorm:"primaryKey"`
|
||||
Name string
|
||||
Hash int64 // postgres does not support unsigned integers, so we have to pretend
|
||||
Hash int64 `gorm:"uniqueIndex"` // postgres does not support unsigned integers, so we have to pretend
|
||||
Source string
|
||||
ResourceType ResourceType // is this a submission or is it a mapfix
|
||||
ResourceID int64 // which submission / mapfix did this script first appear in
|
||||
|
||||
@@ -96,7 +96,7 @@ func setupRoutes(cfg *RouterConfig) (*gin.Engine, error) {
|
||||
// Docs
|
||||
public_api.GET("/docs/*any", ginSwagger.WrapHandler(swaggerfiles.Handler))
|
||||
public_api.GET("/", func(ctx *gin.Context) {
|
||||
ctx.Redirect(http.StatusPermanentRedirect, "/docs/index.html")
|
||||
ctx.Redirect(http.StatusPermanentRedirect, "/public-api/docs/index.html")
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
160
pkg/roblox/thumbnails.go
Normal file
160
pkg/roblox/thumbnails.go
Normal file
@@ -0,0 +1,160 @@
|
||||
package roblox
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// ThumbnailSize represents valid Roblox thumbnail sizes
|
||||
type ThumbnailSize string
|
||||
|
||||
const (
|
||||
Size150x150 ThumbnailSize = "150x150"
|
||||
Size420x420 ThumbnailSize = "420x420"
|
||||
Size768x432 ThumbnailSize = "768x432"
|
||||
)
|
||||
|
||||
// ThumbnailFormat represents the image format
|
||||
type ThumbnailFormat string
|
||||
|
||||
const (
|
||||
FormatPng ThumbnailFormat = "Png"
|
||||
FormatJpeg ThumbnailFormat = "Jpeg"
|
||||
)
|
||||
|
||||
// ThumbnailRequest represents a single thumbnail request
|
||||
type ThumbnailRequest struct {
|
||||
RequestID string `json:"requestId,omitempty"`
|
||||
Type string `json:"type"`
|
||||
TargetID uint64 `json:"targetId"`
|
||||
Size string `json:"size,omitempty"`
|
||||
Format string `json:"format,omitempty"`
|
||||
}
|
||||
|
||||
// ThumbnailData represents a single thumbnail response
|
||||
type ThumbnailData struct {
|
||||
TargetID uint64 `json:"targetId"`
|
||||
State string `json:"state"` // "Completed", "Error", "Pending"
|
||||
ImageURL string `json:"imageUrl"`
|
||||
}
|
||||
|
||||
// BatchThumbnailsResponse represents the API response
|
||||
type BatchThumbnailsResponse struct {
|
||||
Data []ThumbnailData `json:"data"`
|
||||
}
|
||||
|
||||
// GetAssetThumbnails fetches thumbnails for multiple assets in a single batch request
|
||||
// Roblox allows up to 100 assets per batch
|
||||
func (c *Client) GetAssetThumbnails(assetIDs []uint64, size ThumbnailSize, format ThumbnailFormat) ([]ThumbnailData, error) {
|
||||
if len(assetIDs) == 0 {
|
||||
return []ThumbnailData{}, nil
|
||||
}
|
||||
if len(assetIDs) > 100 {
|
||||
return nil, GetError("batch size cannot exceed 100 assets")
|
||||
}
|
||||
|
||||
// Build request payload - the API expects an array directly, not wrapped in an object
|
||||
requests := make([]ThumbnailRequest, len(assetIDs))
|
||||
for i, assetID := range assetIDs {
|
||||
requests[i] = ThumbnailRequest{
|
||||
Type: "Asset",
|
||||
TargetID: assetID,
|
||||
Size: string(size),
|
||||
Format: string(format),
|
||||
}
|
||||
}
|
||||
|
||||
jsonData, err := json.Marshal(requests)
|
||||
if err != nil {
|
||||
return nil, GetError("JSONMarshalError: " + err.Error())
|
||||
}
|
||||
|
||||
req, err := http.NewRequest("POST", "https://thumbnails.roblox.com/v1/batch", bytes.NewBuffer(jsonData))
|
||||
if err != nil {
|
||||
return nil, GetError("RequestCreationError: " + err.Error())
|
||||
}
|
||||
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
resp, err := c.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, GetError("RequestError: " + err.Error())
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
body, _ := io.ReadAll(resp.Body)
|
||||
return nil, GetError(fmt.Sprintf("ResponseError: status code %d, body: %s", resp.StatusCode, string(body)))
|
||||
}
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, GetError("ReadBodyError: " + err.Error())
|
||||
}
|
||||
|
||||
var response BatchThumbnailsResponse
|
||||
if err := json.Unmarshal(body, &response); err != nil {
|
||||
return nil, GetError("JSONUnmarshalError: " + err.Error())
|
||||
}
|
||||
|
||||
return response.Data, nil
|
||||
}
|
||||
|
||||
// GetUserAvatarThumbnails fetches avatar thumbnails for multiple users in a single batch request
|
||||
func (c *Client) GetUserAvatarThumbnails(userIDs []uint64, size ThumbnailSize, format ThumbnailFormat) ([]ThumbnailData, error) {
|
||||
if len(userIDs) == 0 {
|
||||
return []ThumbnailData{}, nil
|
||||
}
|
||||
if len(userIDs) > 100 {
|
||||
return nil, GetError("batch size cannot exceed 100 users")
|
||||
}
|
||||
|
||||
// Build request payload - the API expects an array directly, not wrapped in an object
|
||||
requests := make([]ThumbnailRequest, len(userIDs))
|
||||
for i, userID := range userIDs {
|
||||
requests[i] = ThumbnailRequest{
|
||||
Type: "AvatarHeadShot",
|
||||
TargetID: userID,
|
||||
Size: string(size),
|
||||
Format: string(format),
|
||||
}
|
||||
}
|
||||
|
||||
jsonData, err := json.Marshal(requests)
|
||||
if err != nil {
|
||||
return nil, GetError("JSONMarshalError: " + err.Error())
|
||||
}
|
||||
|
||||
req, err := http.NewRequest("POST", "https://thumbnails.roblox.com/v1/batch", bytes.NewBuffer(jsonData))
|
||||
if err != nil {
|
||||
return nil, GetError("RequestCreationError: " + err.Error())
|
||||
}
|
||||
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
resp, err := c.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, GetError("RequestError: " + err.Error())
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
body, _ := io.ReadAll(resp.Body)
|
||||
return nil, GetError(fmt.Sprintf("ResponseError: status code %d, body: %s", resp.StatusCode, string(body)))
|
||||
}
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, GetError("ReadBodyError: " + err.Error())
|
||||
}
|
||||
|
||||
var response BatchThumbnailsResponse
|
||||
if err := json.Unmarshal(body, &response); err != nil {
|
||||
return nil, GetError("JSONUnmarshalError: " + err.Error())
|
||||
}
|
||||
|
||||
return response.Data, nil
|
||||
}
|
||||
72
pkg/roblox/users.go
Normal file
72
pkg/roblox/users.go
Normal file
@@ -0,0 +1,72 @@
|
||||
package roblox
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// UserData represents a single user's information
|
||||
type UserData struct {
|
||||
ID uint64 `json:"id"`
|
||||
Name string `json:"name"`
|
||||
DisplayName string `json:"displayName"`
|
||||
}
|
||||
|
||||
// BatchUsersResponse represents the API response for batch user requests
|
||||
type BatchUsersResponse struct {
|
||||
Data []UserData `json:"data"`
|
||||
}
|
||||
|
||||
// GetUsernames fetches usernames for multiple users in a single batch request
|
||||
// Roblox allows up to 100 users per batch
|
||||
func (c *Client) GetUsernames(userIDs []uint64) ([]UserData, error) {
|
||||
if len(userIDs) == 0 {
|
||||
return []UserData{}, nil
|
||||
}
|
||||
if len(userIDs) > 100 {
|
||||
return nil, GetError("batch size cannot exceed 100 users")
|
||||
}
|
||||
|
||||
// Build request payload
|
||||
payload := map[string][]uint64{
|
||||
"userIds": userIDs,
|
||||
}
|
||||
|
||||
jsonData, err := json.Marshal(payload)
|
||||
if err != nil {
|
||||
return nil, GetError("JSONMarshalError: " + err.Error())
|
||||
}
|
||||
|
||||
req, err := http.NewRequest("POST", "https://users.roblox.com/v1/users", bytes.NewBuffer(jsonData))
|
||||
if err != nil {
|
||||
return nil, GetError("RequestCreationError: " + err.Error())
|
||||
}
|
||||
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
resp, err := c.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, GetError("RequestError: " + err.Error())
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
body, _ := io.ReadAll(resp.Body)
|
||||
return nil, GetError(fmt.Sprintf("ResponseError: status code %d, body: %s", resp.StatusCode, string(body)))
|
||||
}
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, GetError("ReadBodyError: " + err.Error())
|
||||
}
|
||||
|
||||
var response BatchUsersResponse
|
||||
if err := json.Unmarshal(body, &response); err != nil {
|
||||
return nil, GetError("JSONUnmarshalError: " + err.Error())
|
||||
}
|
||||
|
||||
return response.Data, nil
|
||||
}
|
||||
@@ -103,6 +103,10 @@ func (svc *Service) GetMapfix(ctx context.Context, id int64) (model.Mapfix, erro
|
||||
return svc.db.Mapfixes().Get(ctx, id)
|
||||
}
|
||||
|
||||
func (svc *Service) GetMapfixList(ctx context.Context, ids []int64) ([]model.Mapfix, error) {
|
||||
return svc.db.Mapfixes().GetList(ctx, ids)
|
||||
}
|
||||
|
||||
func (svc *Service) UpdateMapfix(ctx context.Context, id int64, pmap MapfixUpdate) error {
|
||||
return svc.db.Mapfixes().Update(ctx, id, datastore.OptionalMap(pmap))
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ package service
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"git.itzana.me/strafesnet/go-grpc/maps"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
|
||||
@@ -26,7 +27,7 @@ func (update MapUpdate) SetGameID(game_id uint32) {
|
||||
datastore.OptionalMap(update).Add("game_id", game_id)
|
||||
}
|
||||
func (update MapUpdate) SetDate(date int64) {
|
||||
datastore.OptionalMap(update).Add("date", date)
|
||||
datastore.OptionalMap(update).Add("date", time.Unix(date, 0))
|
||||
}
|
||||
func (update MapUpdate) SetSubmitter(submitter uint64) {
|
||||
datastore.OptionalMap(update).Add("submitter", submitter)
|
||||
|
||||
@@ -112,3 +112,29 @@ func (svc *Service) NatsValidateMapfix(
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (svc *Service) NatsReleaseMapfix(
|
||||
MapfixID int64,
|
||||
ModelID uint64,
|
||||
ModelVersion uint64,
|
||||
TargetAssetID uint64,
|
||||
) error {
|
||||
release_fix_request := model.ReleaseMapfixRequest{
|
||||
MapfixID: MapfixID,
|
||||
ModelID: ModelID,
|
||||
ModelVersion: ModelVersion,
|
||||
TargetAssetID: TargetAssetID,
|
||||
}
|
||||
|
||||
j, err := json.Marshal(release_fix_request)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = svc.nats.Publish("maptest.mapfixes.release", []byte(j))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -88,6 +88,28 @@ func (svc *Service) NatsUploadSubmission(
|
||||
return nil
|
||||
}
|
||||
|
||||
func (svc *Service) NatsBatchReleaseSubmissions(
|
||||
Submissions []model.ReleaseSubmissionRequest,
|
||||
operation int32,
|
||||
) error {
|
||||
release_new_request := model.BatchReleaseSubmissionsRequest{
|
||||
Submissions: Submissions,
|
||||
OperationID: operation,
|
||||
}
|
||||
|
||||
j, err := json.Marshal(release_new_request)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = svc.nats.Publish("maptest.submissions.batchrelease", []byte(j))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (svc *Service) NatsValidateSubmission(
|
||||
SubmissionID int64,
|
||||
ModelID uint64,
|
||||
|
||||
@@ -1,17 +1,22 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"git.itzana.me/strafesnet/go-grpc/maps"
|
||||
"git.itzana.me/strafesnet/go-grpc/users"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/roblox"
|
||||
"github.com/nats-io/nats.go"
|
||||
"github.com/redis/go-redis/v9"
|
||||
)
|
||||
|
||||
type Service struct {
|
||||
db datastore.Datastore
|
||||
nats nats.JetStreamContext
|
||||
maps maps.MapsServiceClient
|
||||
users users.UsersServiceClient
|
||||
db datastore.Datastore
|
||||
nats nats.JetStreamContext
|
||||
maps maps.MapsServiceClient
|
||||
users users.UsersServiceClient
|
||||
thumbnailService *ThumbnailService
|
||||
}
|
||||
|
||||
func NewService(
|
||||
@@ -19,11 +24,44 @@ func NewService(
|
||||
nats nats.JetStreamContext,
|
||||
maps maps.MapsServiceClient,
|
||||
users users.UsersServiceClient,
|
||||
robloxClient *roblox.Client,
|
||||
redisClient *redis.Client,
|
||||
) Service {
|
||||
return Service{
|
||||
db: db,
|
||||
nats: nats,
|
||||
maps: maps,
|
||||
users: users,
|
||||
db: db,
|
||||
nats: nats,
|
||||
maps: maps,
|
||||
users: users,
|
||||
thumbnailService: NewThumbnailService(robloxClient, redisClient),
|
||||
}
|
||||
}
|
||||
|
||||
// GetAssetThumbnails proxies to the thumbnail service
|
||||
func (s *Service) GetAssetThumbnails(ctx context.Context, assetIDs []uint64, size roblox.ThumbnailSize) (map[uint64]string, error) {
|
||||
return s.thumbnailService.GetAssetThumbnails(ctx, assetIDs, size)
|
||||
}
|
||||
|
||||
// GetUserAvatarThumbnails proxies to the thumbnail service
|
||||
func (s *Service) GetUserAvatarThumbnails(ctx context.Context, userIDs []uint64, size roblox.ThumbnailSize) (map[uint64]string, error) {
|
||||
return s.thumbnailService.GetUserAvatarThumbnails(ctx, userIDs, size)
|
||||
}
|
||||
|
||||
// GetSingleAssetThumbnail proxies to the thumbnail service
|
||||
func (s *Service) GetSingleAssetThumbnail(ctx context.Context, assetID uint64, size roblox.ThumbnailSize) (string, error) {
|
||||
return s.thumbnailService.GetSingleAssetThumbnail(ctx, assetID, size)
|
||||
}
|
||||
|
||||
// GetSingleUserAvatarThumbnail proxies to the thumbnail service
|
||||
func (s *Service) GetSingleUserAvatarThumbnail(ctx context.Context, userID uint64, size roblox.ThumbnailSize) (string, error) {
|
||||
return s.thumbnailService.GetSingleUserAvatarThumbnail(ctx, userID, size)
|
||||
}
|
||||
|
||||
// GetUsernames proxies to the thumbnail service
|
||||
func (s *Service) GetUsernames(ctx context.Context, userIDs []uint64) (map[uint64]string, error) {
|
||||
return s.thumbnailService.GetUsernames(ctx, userIDs)
|
||||
}
|
||||
|
||||
// GetSingleUsername proxies to the thumbnail service
|
||||
func (s *Service) GetSingleUsername(ctx context.Context, userID uint64) (string, error) {
|
||||
return s.thumbnailService.GetSingleUsername(ctx, userID)
|
||||
}
|
||||
|
||||
218
pkg/service/thumbnails.go
Normal file
218
pkg/service/thumbnails.go
Normal file
@@ -0,0 +1,218 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/roblox"
|
||||
"github.com/redis/go-redis/v9"
|
||||
)
|
||||
|
||||
type ThumbnailService struct {
|
||||
robloxClient *roblox.Client
|
||||
redisClient *redis.Client
|
||||
cacheTTL time.Duration
|
||||
}
|
||||
|
||||
func NewThumbnailService(robloxClient *roblox.Client, redisClient *redis.Client) *ThumbnailService {
|
||||
return &ThumbnailService{
|
||||
robloxClient: robloxClient,
|
||||
redisClient: redisClient,
|
||||
cacheTTL: 24 * time.Hour, // Cache thumbnails for 24 hours
|
||||
}
|
||||
}
|
||||
|
||||
// CachedThumbnail represents a cached thumbnail entry
|
||||
type CachedThumbnail struct {
|
||||
ImageURL string `json:"imageUrl"`
|
||||
State string `json:"state"`
|
||||
CachedAt time.Time `json:"cachedAt"`
|
||||
}
|
||||
|
||||
// GetAssetThumbnails fetches thumbnails with Redis caching and batching
|
||||
func (s *ThumbnailService) GetAssetThumbnails(ctx context.Context, assetIDs []uint64, size roblox.ThumbnailSize) (map[uint64]string, error) {
|
||||
if len(assetIDs) == 0 {
|
||||
return map[uint64]string{}, nil
|
||||
}
|
||||
|
||||
result := make(map[uint64]string)
|
||||
var missingIDs []uint64
|
||||
|
||||
// Try to get from cache first
|
||||
for _, assetID := range assetIDs {
|
||||
cacheKey := fmt.Sprintf("thumbnail:asset:%d:%s", assetID, size)
|
||||
cached, err := s.redisClient.Get(ctx, cacheKey).Result()
|
||||
|
||||
if err == redis.Nil {
|
||||
// Cache miss
|
||||
missingIDs = append(missingIDs, assetID)
|
||||
} else if err != nil {
|
||||
// Redis error - treat as cache miss
|
||||
missingIDs = append(missingIDs, assetID)
|
||||
} else {
|
||||
// Cache hit
|
||||
var thumbnail CachedThumbnail
|
||||
if err := json.Unmarshal([]byte(cached), &thumbnail); err == nil && thumbnail.State == "Completed" {
|
||||
result[assetID] = thumbnail.ImageURL
|
||||
} else {
|
||||
missingIDs = append(missingIDs, assetID)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If all were cached, return early
|
||||
if len(missingIDs) == 0 {
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// Batch fetch missing thumbnails from Roblox API
|
||||
// Split into batches of 100 (Roblox API limit)
|
||||
for i := 0; i < len(missingIDs); i += 100 {
|
||||
end := i + 100
|
||||
if end > len(missingIDs) {
|
||||
end = len(missingIDs)
|
||||
}
|
||||
batch := missingIDs[i:end]
|
||||
|
||||
thumbnails, err := s.robloxClient.GetAssetThumbnails(batch, size, roblox.FormatPng)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to fetch thumbnails: %w", err)
|
||||
}
|
||||
|
||||
// Process results and cache them
|
||||
for _, thumb := range thumbnails {
|
||||
cached := CachedThumbnail{
|
||||
ImageURL: thumb.ImageURL,
|
||||
State: thumb.State,
|
||||
CachedAt: time.Now(),
|
||||
}
|
||||
|
||||
if thumb.State == "Completed" && thumb.ImageURL != "" {
|
||||
result[thumb.TargetID] = thumb.ImageURL
|
||||
}
|
||||
|
||||
// Cache the result (even if incomplete, to avoid repeated API calls)
|
||||
cacheKey := fmt.Sprintf("thumbnail:asset:%d:%s", thumb.TargetID, size)
|
||||
cachedJSON, _ := json.Marshal(cached)
|
||||
|
||||
// Use shorter TTL for incomplete thumbnails
|
||||
ttl := s.cacheTTL
|
||||
if thumb.State != "Completed" {
|
||||
ttl = 5 * time.Minute
|
||||
}
|
||||
|
||||
s.redisClient.Set(ctx, cacheKey, cachedJSON, ttl)
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// GetUserAvatarThumbnails fetches user avatar thumbnails with Redis caching and batching
|
||||
func (s *ThumbnailService) GetUserAvatarThumbnails(ctx context.Context, userIDs []uint64, size roblox.ThumbnailSize) (map[uint64]string, error) {
|
||||
if len(userIDs) == 0 {
|
||||
return map[uint64]string{}, nil
|
||||
}
|
||||
|
||||
result := make(map[uint64]string)
|
||||
var missingIDs []uint64
|
||||
|
||||
// Try to get from cache first
|
||||
for _, userID := range userIDs {
|
||||
cacheKey := fmt.Sprintf("thumbnail:user:%d:%s", userID, size)
|
||||
cached, err := s.redisClient.Get(ctx, cacheKey).Result()
|
||||
|
||||
if err == redis.Nil {
|
||||
// Cache miss
|
||||
missingIDs = append(missingIDs, userID)
|
||||
} else if err != nil {
|
||||
// Redis error - treat as cache miss
|
||||
missingIDs = append(missingIDs, userID)
|
||||
} else {
|
||||
// Cache hit
|
||||
var thumbnail CachedThumbnail
|
||||
if err := json.Unmarshal([]byte(cached), &thumbnail); err == nil && thumbnail.State == "Completed" {
|
||||
result[userID] = thumbnail.ImageURL
|
||||
} else {
|
||||
missingIDs = append(missingIDs, userID)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If all were cached, return early
|
||||
if len(missingIDs) == 0 {
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// Batch fetch missing thumbnails from Roblox API
|
||||
// Split into batches of 100 (Roblox API limit)
|
||||
for i := 0; i < len(missingIDs); i += 100 {
|
||||
end := i + 100
|
||||
if end > len(missingIDs) {
|
||||
end = len(missingIDs)
|
||||
}
|
||||
batch := missingIDs[i:end]
|
||||
|
||||
thumbnails, err := s.robloxClient.GetUserAvatarThumbnails(batch, size, roblox.FormatPng)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to fetch user thumbnails: %w", err)
|
||||
}
|
||||
|
||||
// Process results and cache them
|
||||
for _, thumb := range thumbnails {
|
||||
cached := CachedThumbnail{
|
||||
ImageURL: thumb.ImageURL,
|
||||
State: thumb.State,
|
||||
CachedAt: time.Now(),
|
||||
}
|
||||
|
||||
if thumb.State == "Completed" && thumb.ImageURL != "" {
|
||||
result[thumb.TargetID] = thumb.ImageURL
|
||||
}
|
||||
|
||||
// Cache the result
|
||||
cacheKey := fmt.Sprintf("thumbnail:user:%d:%s", thumb.TargetID, size)
|
||||
cachedJSON, _ := json.Marshal(cached)
|
||||
|
||||
// Use shorter TTL for incomplete thumbnails
|
||||
ttl := s.cacheTTL
|
||||
if thumb.State != "Completed" {
|
||||
ttl = 5 * time.Minute
|
||||
}
|
||||
|
||||
s.redisClient.Set(ctx, cacheKey, cachedJSON, ttl)
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// GetSingleAssetThumbnail is a convenience method for fetching a single asset thumbnail
|
||||
func (s *ThumbnailService) GetSingleAssetThumbnail(ctx context.Context, assetID uint64, size roblox.ThumbnailSize) (string, error) {
|
||||
results, err := s.GetAssetThumbnails(ctx, []uint64{assetID}, size)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if url, ok := results[assetID]; ok {
|
||||
return url, nil
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("thumbnail not available for asset %d", assetID)
|
||||
}
|
||||
|
||||
// GetSingleUserAvatarThumbnail is a convenience method for fetching a single user avatar thumbnail
|
||||
func (s *ThumbnailService) GetSingleUserAvatarThumbnail(ctx context.Context, userID uint64, size roblox.ThumbnailSize) (string, error) {
|
||||
results, err := s.GetUserAvatarThumbnails(ctx, []uint64{userID}, size)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if url, ok := results[userID]; ok {
|
||||
return url, nil
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("thumbnail not available for user %d", userID)
|
||||
}
|
||||
108
pkg/service/users.go
Normal file
108
pkg/service/users.go
Normal file
@@ -0,0 +1,108 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/roblox"
|
||||
"github.com/redis/go-redis/v9"
|
||||
)
|
||||
|
||||
// CachedUser represents a cached user entry as stored in Redis (JSON-encoded).
type CachedUser struct {
	Name        string    `json:"name"`        // Roblox username; empty is treated as a cache miss on read
	DisplayName string    `json:"displayName"` // Roblox display name
	CachedAt    time.Time `json:"cachedAt"`    // when this entry was written
}
|
||||
|
||||
// GetUsernames fetches usernames with Redis caching and batching
|
||||
func (s *ThumbnailService) GetUsernames(ctx context.Context, userIDs []uint64) (map[uint64]string, error) {
|
||||
if len(userIDs) == 0 {
|
||||
return map[uint64]string{}, nil
|
||||
}
|
||||
|
||||
result := make(map[uint64]string)
|
||||
var missingIDs []uint64
|
||||
|
||||
// Try to get from cache first
|
||||
for _, userID := range userIDs {
|
||||
cacheKey := fmt.Sprintf("user:name:%d", userID)
|
||||
cached, err := s.redisClient.Get(ctx, cacheKey).Result()
|
||||
|
||||
if err == redis.Nil {
|
||||
// Cache miss
|
||||
missingIDs = append(missingIDs, userID)
|
||||
} else if err != nil {
|
||||
// Redis error - treat as cache miss
|
||||
missingIDs = append(missingIDs, userID)
|
||||
} else {
|
||||
// Cache hit
|
||||
var user CachedUser
|
||||
if err := json.Unmarshal([]byte(cached), &user); err == nil && user.Name != "" {
|
||||
result[userID] = user.Name
|
||||
} else {
|
||||
missingIDs = append(missingIDs, userID)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If all were cached, return early
|
||||
if len(missingIDs) == 0 {
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// Batch fetch missing usernames from Roblox API
|
||||
// Split into batches of 100 (Roblox API limit)
|
||||
for i := 0; i < len(missingIDs); i += 100 {
|
||||
end := i + 100
|
||||
if end > len(missingIDs) {
|
||||
end = len(missingIDs)
|
||||
}
|
||||
batch := missingIDs[i:end]
|
||||
|
||||
var users []roblox.UserData
|
||||
var err error
|
||||
users, err = s.robloxClient.GetUsernames(batch)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to fetch usernames: %w", err)
|
||||
}
|
||||
|
||||
// Process results and cache them
|
||||
for _, user := range users {
|
||||
cached := CachedUser{
|
||||
Name: user.Name,
|
||||
DisplayName: user.DisplayName,
|
||||
CachedAt: time.Now(),
|
||||
}
|
||||
|
||||
if user.Name != "" {
|
||||
result[user.ID] = user.Name
|
||||
}
|
||||
|
||||
// Cache the result
|
||||
cacheKey := fmt.Sprintf("user:name:%d", user.ID)
|
||||
cachedJSON, _ := json.Marshal(cached)
|
||||
|
||||
// Cache usernames for a long time (7 days) since they rarely change
|
||||
s.redisClient.Set(ctx, cacheKey, cachedJSON, 7*24*time.Hour)
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// GetSingleUsername is a convenience method for fetching a single username
|
||||
func (s *ThumbnailService) GetSingleUsername(ctx context.Context, userID uint64) (string, error) {
|
||||
results, err := s.GetUsernames(ctx, []uint64{userID})
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if name, ok := results[userID]; ok {
|
||||
return name, nil
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("username not available for user %d", userID)
|
||||
}
|
||||
@@ -26,6 +26,8 @@ func NewMapfixesController(
|
||||
var(
|
||||
// prevent two mapfixes with same asset id
|
||||
ActiveMapfixStatuses = []model.MapfixStatus{
|
||||
model.MapfixStatusReleasing,
|
||||
model.MapfixStatusUploaded,
|
||||
model.MapfixStatusUploading,
|
||||
model.MapfixStatusValidated,
|
||||
model.MapfixStatusValidating,
|
||||
@@ -184,7 +186,7 @@ func (svc *Mapfixes) SetStatusValidated(ctx context.Context, params *validator.M
|
||||
// (Internal endpoint) Role Validator changes status from Validating -> Accepted.
|
||||
//
|
||||
// POST /mapfixes/{MapfixID}/status/validator-failed
|
||||
func (svc *Mapfixes) SetStatusFailed(ctx context.Context, params *validator.MapfixID) (*validator.NullResponse, error) {
|
||||
func (svc *Mapfixes) SetStatusNotValidated(ctx context.Context, params *validator.MapfixID) (*validator.NullResponse, error) {
|
||||
MapfixID := int64(params.ID)
|
||||
// transaction
|
||||
target_status := model.MapfixStatusAcceptedUnvalidated
|
||||
@@ -253,6 +255,117 @@ func (svc *Mapfixes) SetStatusUploaded(ctx context.Context, params *validator.Ma
|
||||
|
||||
return &validator.NullResponse{}, nil
|
||||
}
|
||||
func (svc *Mapfixes) SetStatusNotUploaded(ctx context.Context, params *validator.MapfixID) (*validator.NullResponse, error) {
|
||||
MapfixID := int64(params.ID)
|
||||
// transaction
|
||||
target_status := model.MapfixStatusValidated
|
||||
update := service.NewMapfixUpdate()
|
||||
update.SetStatusID(target_status)
|
||||
allow_statuses := []model.MapfixStatus{model.MapfixStatusUploading}
|
||||
err := svc.inner.UpdateMapfixIfStatus(ctx, MapfixID, allow_statuses, update)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// push an action audit event
|
||||
event_data := model.AuditEventDataAction{
|
||||
TargetStatus: uint32(target_status),
|
||||
}
|
||||
|
||||
err = svc.inner.CreateAuditEventAction(
|
||||
ctx,
|
||||
model.ValidatorUserID,
|
||||
model.Resource{
|
||||
ID: MapfixID,
|
||||
Type: model.ResourceMapfix,
|
||||
},
|
||||
event_data,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &validator.NullResponse{}, nil
|
||||
}
|
||||
|
||||
// ActionMapfixReleased implements actionMapfixReleased operation.
//
// (Internal endpoint) Role Validator changes status from Releasing -> Released.
//
// POST /mapfixes/{MapfixID}/status/validator-released
func (svc *Mapfixes) SetStatusReleased(ctx context.Context, params *validator.MapfixReleaseRequest) (*validator.NullResponse, error) {
	MapfixID := int64(params.MapfixID)
	// transaction: only transition when the current status is Releasing
	target_status := model.MapfixStatusReleased
	update := service.NewMapfixUpdate()
	update.SetStatusID(target_status)
	allow_statuses := []model.MapfixStatus{model.MapfixStatusReleasing}
	err := svc.inner.UpdateMapfixIfStatus(ctx, MapfixID, allow_statuses, update)
	if err != nil {
		return nil, err
	}

	// push an action audit event attributed to the validator user
	event_data := model.AuditEventDataAction{
		TargetStatus: uint32(target_status),
	}

	err = svc.inner.CreateAuditEventAction(
		ctx,
		model.ValidatorUserID,
		model.Resource{
			ID:   MapfixID,
			Type: model.ResourceMapfix,
		},
		event_data,
	)
	if err != nil {
		return nil, err
	}

	// metadata maintenance: sync the target map's asset version and modes to
	// the released fix.
	// NOTE(review): this runs after the status update and audit event; if it
	// fails, the mapfix is already Released but the map metadata stays stale —
	// confirm this is acceptable or retried elsewhere.
	map_update := service.NewMapUpdate()
	map_update.SetAssetVersion(params.AssetVersion)
	map_update.SetModes(params.Modes)

	err = svc.inner.UpdateMap(ctx, int64(params.TargetAssetID), map_update)
	if err != nil {
		return nil, err
	}

	return &validator.NullResponse{}, nil
}
|
||||
func (svc *Mapfixes) SetStatusNotReleased(ctx context.Context, params *validator.MapfixID) (*validator.NullResponse, error) {
|
||||
MapfixID := int64(params.ID)
|
||||
// transaction
|
||||
target_status := model.MapfixStatusUploaded
|
||||
update := service.NewMapfixUpdate()
|
||||
update.SetStatusID(target_status)
|
||||
allow_statuses := []model.MapfixStatus{model.MapfixStatusReleasing}
|
||||
err := svc.inner.UpdateMapfixIfStatus(ctx, MapfixID, allow_statuses, update)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// push an action audit event
|
||||
event_data := model.AuditEventDataAction{
|
||||
TargetStatus: uint32(target_status),
|
||||
}
|
||||
|
||||
err = svc.inner.CreateAuditEventAction(
|
||||
ctx,
|
||||
model.ValidatorUserID,
|
||||
model.Resource{
|
||||
ID: MapfixID,
|
||||
Type: model.ResourceMapfix,
|
||||
},
|
||||
event_data,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &validator.NullResponse{}, nil
|
||||
}
|
||||
|
||||
// CreateMapfixAuditError implements createMapfixAuditError operation.
|
||||
//
|
||||
|
||||
@@ -19,6 +19,18 @@ func NewOperationsController(
|
||||
}
|
||||
}
|
||||
|
||||
func (svc *Operations) Success(ctx context.Context, params *validator.OperationSuccessRequest) (*validator.NullResponse, error) {
|
||||
success_params := service.NewOperationCompleteParams(
|
||||
params.Path,
|
||||
)
|
||||
err := svc.inner.CompleteOperation(ctx, int32(params.OperationID), success_params)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &validator.NullResponse{}, nil
|
||||
}
|
||||
|
||||
// ActionOperationFailed implements actionOperationFailed operation.
|
||||
//
|
||||
// Fail the specified OperationID with a StatusMessage.
|
||||
|
||||
@@ -4,6 +4,7 @@ import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"git.itzana.me/strafesnet/go-grpc/validator"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
|
||||
@@ -24,7 +25,7 @@ func NewSubmissionsController(
|
||||
}
|
||||
|
||||
var(
|
||||
// prevent two mapfixes with same asset id
|
||||
// prevent two submissions with same asset id
|
||||
ActiveSubmissionStatuses = []model.SubmissionStatus{
|
||||
model.SubmissionStatusUploading,
|
||||
model.SubmissionStatusValidated,
|
||||
@@ -202,7 +203,7 @@ func (svc *Submissions) SetStatusValidated(ctx context.Context, params *validato
|
||||
// (Internal endpoint) Role Validator changes status from Validating -> Accepted.
|
||||
//
|
||||
// POST /submissions/{SubmissionID}/status/validator-failed
|
||||
func (svc *Submissions) SetStatusFailed(ctx context.Context, params *validator.SubmissionID) (*validator.NullResponse, error) {
|
||||
func (svc *Submissions) SetStatusNotValidated(ctx context.Context, params *validator.SubmissionID) (*validator.NullResponse, error) {
|
||||
SubmissionID := int64(params.ID)
|
||||
// transaction
|
||||
target_status := model.SubmissionStatusAcceptedUnvalidated
|
||||
@@ -273,6 +274,68 @@ func (svc *Submissions) SetStatusUploaded(ctx context.Context, params *validator
|
||||
return &validator.NullResponse{}, nil
|
||||
}
|
||||
|
||||
func (svc *Submissions) SetStatusNotUploaded(ctx context.Context, params *validator.SubmissionID) (*validator.NullResponse, error) {
|
||||
SubmissionID := int64(params.ID)
|
||||
// transaction
|
||||
target_status := model.SubmissionStatusValidated
|
||||
update := service.NewSubmissionUpdate()
|
||||
update.SetStatusID(target_status)
|
||||
allowed_statuses :=[]model.SubmissionStatus{model.SubmissionStatusUploading}
|
||||
err := svc.inner.UpdateSubmissionIfStatus(ctx, SubmissionID, allowed_statuses, update)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// push an action audit event
|
||||
event_data := model.AuditEventDataAction{
|
||||
TargetStatus: uint32(target_status),
|
||||
}
|
||||
|
||||
err = svc.inner.CreateAuditEventAction(
|
||||
ctx,
|
||||
model.ValidatorUserID,
|
||||
model.Resource{
|
||||
ID: SubmissionID,
|
||||
Type: model.ResourceSubmission,
|
||||
},
|
||||
event_data,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &validator.NullResponse{}, nil
|
||||
}
|
||||
|
||||
// SetStatusReleased (internal endpoint): creates the released map record from
// the submission's MapCreate payload, then changes status Uploaded -> Released.
//
// NOTE(review): map creation happens before the status update; if the status
// update fails, the map exists while the submission stays Uploaded — confirm
// this is handled (e.g. by retrying) or acceptable.
func (svc *Submissions) SetStatusReleased(ctx context.Context, params *validator.SubmissionReleaseRequest) (*validator.NullResponse, error){
	// create map with go-grpc
	_, err := svc.inner.CreateMap(ctx, model.Map{
		ID:           params.MapCreate.ID,
		DisplayName:  params.MapCreate.DisplayName,
		Creator:      params.MapCreate.Creator,
		GameID:       params.MapCreate.GameID,
		Date:         time.Unix(params.MapCreate.Date, 0), // Date is a unix timestamp in seconds
		Submitter:    params.MapCreate.Submitter,
		Thumbnail:    0, // no thumbnail asset assigned at release time
		AssetVersion: params.MapCreate.AssetVersion,
		LoadCount:    0, // fresh map starts with zero loads
		Modes:        params.MapCreate.Modes,
	})
	if err != nil {
		return nil, err
	}

	// update status to Released (only if currently Uploaded)
	update := service.NewSubmissionUpdate()
	update.SetStatusID(model.SubmissionStatusReleased)
	err = svc.inner.UpdateSubmissionIfStatus(ctx, int64(params.SubmissionID), []model.SubmissionStatus{model.SubmissionStatusUploaded}, update)
	if err != nil {
		return nil, err
	}

	return &validator.NullResponse{}, nil
}
|
||||
|
||||
// CreateSubmissionAuditError implements createSubmissionAuditError operation.
|
||||
//
|
||||
// Post an error to the audit log
|
||||
|
||||
@@ -22,6 +22,8 @@ var(
|
||||
}
|
||||
// limit mapfixes in the pipeline to one per target map
|
||||
ActiveAcceptedMapfixStatuses = []model.MapfixStatus{
|
||||
model.MapfixStatusReleasing,
|
||||
model.MapfixStatusUploaded,
|
||||
model.MapfixStatusUploading,
|
||||
model.MapfixStatusValidated,
|
||||
model.MapfixStatusValidating,
|
||||
@@ -193,6 +195,9 @@ func (svc *Service) ListMapfixes(ctx context.Context, params api.ListMapfixesPar
|
||||
if asset_id, asset_id_ok := params.AssetID.Get(); asset_id_ok{
|
||||
filter.SetAssetID(uint64(asset_id))
|
||||
}
|
||||
if asset_version, asset_version_ok := params.AssetVersion.Get(); asset_version_ok{
|
||||
filter.SetAssetVersion(uint64(asset_version))
|
||||
}
|
||||
if target_asset_id, target_asset_id_ok := params.TargetAssetID.Get(); target_asset_id_ok{
|
||||
filter.SetTargetAssetID(uint64(target_asset_id))
|
||||
}
|
||||
@@ -322,6 +327,48 @@ func (svc *Service) UpdateMapfixModel(ctx context.Context, params api.UpdateMapf
|
||||
)
|
||||
}
|
||||
|
||||
// UpdateMapfixDescription implements updateMapfixDescription operation.
//
// Update description (submitter only, status ChangesRequested or UnderConstruction).
//
// PATCH /mapfixes/{MapfixID}/description
func (svc *Service) UpdateMapfixDescription(ctx context.Context, req api.UpdateMapfixDescriptionReq, params api.UpdateMapfixDescriptionParams) error {
	userInfo, ok := ctx.Value("UserInfo").(UserInfoHandle)
	if !ok {
		return ErrUserInfo
	}

	// read mapfix to learn who submitted it
	mapfix, err := svc.inner.GetMapfix(ctx, params.MapfixID)
	if err != nil {
		return err
	}

	userId, err := userInfo.GetUserID()
	if err != nil {
		return err
	}

	// check if caller is the submitter
	// NOTE(review): this check is done on a separate read from the update
	// below; only the status condition is enforced atomically by
	// UpdateMapfixIfStatus — confirm the submitter cannot change in between.
	if userId != mapfix.Submitter {
		return ErrPermissionDeniedNotSubmitter
	}

	// read the new description from the request body
	// NOTE(review): io.ReadAll is unbounded here — presumably the HTTP layer
	// caps the body size; confirm, otherwise a large body could exhaust memory.
	data, err := io.ReadAll(req)
	if err != nil {
		return err
	}

	newDescription := string(data)

	// apply the update only while Status is ChangesRequested or UnderConstruction
	update := service.NewMapfixUpdate()
	update.SetDescription(newDescription)
	allow_statuses := []model.MapfixStatus{model.MapfixStatusChangesRequested, model.MapfixStatusUnderConstruction}
	return svc.inner.UpdateMapfixIfStatus(ctx, params.MapfixID, allow_statuses, update)
}
|
||||
|
||||
// ActionMapfixReject invokes actionMapfixReject operation.
|
||||
//
|
||||
// Role Reviewer changes status from Submitted -> Rejected.
|
||||
@@ -401,7 +448,12 @@ func (svc *Service) ActionMapfixRequestChanges(ctx context.Context, params api.A
|
||||
target_status := model.MapfixStatusChangesRequested
|
||||
update := service.NewMapfixUpdate()
|
||||
update.SetStatusID(target_status)
|
||||
allow_statuses := []model.MapfixStatus{model.MapfixStatusValidated, model.MapfixStatusAcceptedUnvalidated, model.MapfixStatusSubmitted}
|
||||
allow_statuses := []model.MapfixStatus{
|
||||
model.MapfixStatusUploaded,
|
||||
model.MapfixStatusValidated,
|
||||
model.MapfixStatusAcceptedUnvalidated,
|
||||
model.MapfixStatusSubmitted,
|
||||
}
|
||||
err = svc.inner.UpdateMapfixIfStatus(ctx, params.MapfixID, allow_statuses, update)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -507,7 +559,11 @@ func (svc *Service) ActionMapfixTriggerSubmit(ctx context.Context, params api.Ac
|
||||
target_status := model.MapfixStatusSubmitting
|
||||
update := service.NewMapfixUpdate()
|
||||
update.SetStatusID(target_status)
|
||||
allow_statuses := []model.MapfixStatus{model.MapfixStatusUnderConstruction, model.MapfixStatusChangesRequested}
|
||||
allow_statuses := []model.MapfixStatus{
|
||||
model.MapfixStatusUnderConstruction,
|
||||
model.MapfixStatusChangesRequested,
|
||||
model.MapfixStatusSubmitted,
|
||||
}
|
||||
err = svc.inner.UpdateMapfixIfStatus(ctx, params.MapfixID, allow_statuses, update)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -786,6 +842,127 @@ func (svc *Service) ActionMapfixValidated(ctx context.Context, params api.Action
|
||||
)
|
||||
}
|
||||
|
||||
// ActionMapfixTriggerRelease invokes actionMapfixTriggerRelease operation.
//
// Role MapfixUpload changes status from Uploaded -> Releasing.
//
// POST /mapfixes/{MapfixID}/status/trigger-release
func (svc *Service) ActionMapfixTriggerRelease(ctx context.Context, params api.ActionMapfixTriggerReleaseParams) error {
	userInfo, ok := ctx.Value("UserInfo").(UserInfoHandle)
	if !ok {
		return ErrUserInfo
	}

	has_role, err := userInfo.HasRoleMapfixUpload()
	if err != nil {
		return err
	}
	// check if caller has required role
	if !has_role {
		return ErrPermissionDeniedNeedRoleMapfixUpload
	}

	userId, err := userInfo.GetUserID()
	if err != nil {
		return err
	}

	// transaction: atomically flip Uploaded -> Releasing and read back the
	// mapfix fields needed for the release message
	target_status := model.MapfixStatusReleasing
	update := service.NewMapfixUpdate()
	update.SetStatusID(target_status)
	allow_statuses := []model.MapfixStatus{model.MapfixStatusUploaded}
	mapfix, err := svc.inner.UpdateAndGetMapfixIfStatus(ctx, params.MapfixID, allow_statuses, update)
	if err != nil {
		return err
	}

	// this is a map fix: publish the release request over NATS
	// NOTE(review): the status is already Releasing at this point; if the
	// publish fails, the mapfix stays Releasing until manually reset —
	// confirm the reset-releasing endpoint is the intended recovery path.
	err = svc.inner.NatsReleaseMapfix(
		mapfix.ID,
		mapfix.ValidatedAssetID,
		mapfix.ValidatedAssetVersion,
		mapfix.TargetAssetID,
	)
	if err != nil {
		return err
	}

	// push an action audit event attributed to the caller
	event_data := model.AuditEventDataAction{
		TargetStatus: uint32(target_status),
	}

	return svc.inner.CreateAuditEventAction(
		ctx,
		userId,
		model.Resource{
			ID:   params.MapfixID,
			Type: model.ResourceMapfix,
		},
		event_data,
	)
}
|
||||
|
||||
// ActionMapfixUploaded invokes actionMapfixUploaded operation.
//
// Role MapfixUpload manually resets releasing softlock and changes status from Releasing -> Uploaded.
//
// POST /mapfixes/{MapfixID}/status/reset-releasing
func (svc *Service) ActionMapfixUploaded(ctx context.Context, params api.ActionMapfixUploadedParams) error {
	userInfo, ok := ctx.Value("UserInfo").(UserInfoHandle)
	if !ok {
		return ErrUserInfo
	}

	has_role, err := userInfo.HasRoleMapfixUpload()
	if err != nil {
		return err
	}
	// check if caller has required role
	if !has_role {
		return ErrPermissionDeniedNeedRoleMapfixUpload
	}

	userId, err := userInfo.GetUserID()
	if err != nil {
		return err
	}

	// check when mapfix was updated: refuse to reset a mapfix that is still
	// actively being processed
	mapfix, err := svc.inner.GetMapfix(ctx, params.MapfixID)
	if err != nil {
		return err
	}
	if time.Now().Before(mapfix.UpdatedAt.Add(time.Second*10)) {
		// the last time the mapfix was updated must be longer than 10 seconds ago
		return ErrDelayReset
	}

	// transaction: atomically flip Releasing -> Uploaded
	target_status := model.MapfixStatusUploaded
	update := service.NewMapfixUpdate()
	update.SetStatusID(target_status)
	allow_statuses := []model.MapfixStatus{model.MapfixStatusReleasing}
	err = svc.inner.UpdateMapfixIfStatus(ctx, params.MapfixID, allow_statuses, update)
	if err != nil {
		return err
	}

	// push an action audit event attributed to the caller
	event_data := model.AuditEventDataAction{
		TargetStatus: uint32(target_status),
	}

	return svc.inner.CreateAuditEventAction(
		ctx,
		userId,
		model.Resource{
			ID:   params.MapfixID,
			Type: model.ResourceMapfix,
		},
		event_data,
	)
}
|
||||
|
||||
// ActionMapfixTriggerValidate invokes actionMapfixTriggerValidate operation.
|
||||
//
|
||||
// Role Reviewer triggers validation and changes status from Submitted -> Validating.
|
||||
|
||||
@@ -36,10 +36,28 @@ func (svc *Service) CreateScript(ctx context.Context, req *api.ScriptCreate) (*a
|
||||
return nil, err
|
||||
}
|
||||
|
||||
hash := int64(model.HashSource(req.Source))
|
||||
|
||||
// Check if a script with this hash already exists
|
||||
filter := service.NewScriptFilter()
|
||||
filter.SetHash(hash)
|
||||
existingScripts, err := svc.inner.ListScripts(ctx, filter, model.Page{Number: 1, Size: 1})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// If script with this hash exists, return existing script ID
|
||||
if len(existingScripts) > 0 {
|
||||
return &api.ScriptID{
|
||||
ScriptID: existingScripts[0].ID,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Create new script
|
||||
script, err := svc.inner.CreateScript(ctx, model.Script{
|
||||
ID: 0,
|
||||
Name: req.Name,
|
||||
Hash: int64(model.HashSource(req.Source)),
|
||||
Hash: hash,
|
||||
Source: req.Source,
|
||||
ResourceType: model.ResourceType(req.ResourceType),
|
||||
ResourceID: req.ResourceID.Or(0),
|
||||
|
||||
@@ -58,7 +58,7 @@ func (usr UserInfoHandle) Validate() (bool, error) {
|
||||
}
|
||||
return validate.Valid, nil
|
||||
}
|
||||
func (usr UserInfoHandle) hasRoles(wantRoles model.Roles) (bool, error) {
|
||||
func (usr UserInfoHandle) HasRoles(wantRoles model.Roles) (bool, error) {
|
||||
haveroles, err := usr.GetRoles()
|
||||
if err != nil {
|
||||
return false, err
|
||||
@@ -94,25 +94,25 @@ func (usr UserInfoHandle) GetRoles() (model.Roles, error) {
|
||||
|
||||
// RoleThumbnail
|
||||
func (usr UserInfoHandle) HasRoleMapfixUpload() (bool, error) {
|
||||
return usr.hasRoles(model.RolesMapfixUpload)
|
||||
return usr.HasRoles(model.RolesMapfixUpload)
|
||||
}
|
||||
func (usr UserInfoHandle) HasRoleMapfixReview() (bool, error) {
|
||||
return usr.hasRoles(model.RolesMapfixReview)
|
||||
return usr.HasRoles(model.RolesMapfixReview)
|
||||
}
|
||||
func (usr UserInfoHandle) HasRoleMapDownload() (bool, error) {
|
||||
return usr.hasRoles(model.RolesMapDownload)
|
||||
return usr.HasRoles(model.RolesMapDownload)
|
||||
}
|
||||
func (usr UserInfoHandle) HasRoleSubmissionRelease() (bool, error) {
|
||||
return usr.hasRoles(model.RolesSubmissionRelease)
|
||||
return usr.HasRoles(model.RolesSubmissionRelease)
|
||||
}
|
||||
func (usr UserInfoHandle) HasRoleSubmissionUpload() (bool, error) {
|
||||
return usr.hasRoles(model.RolesSubmissionUpload)
|
||||
return usr.HasRoles(model.RolesSubmissionUpload)
|
||||
}
|
||||
func (usr UserInfoHandle) HasRoleSubmissionReview() (bool, error) {
|
||||
return usr.hasRoles(model.RolesSubmissionReview)
|
||||
return usr.HasRoles(model.RolesSubmissionReview)
|
||||
}
|
||||
func (usr UserInfoHandle) HasRoleScriptWrite() (bool, error) {
|
||||
return usr.hasRoles(model.RolesScriptWrite)
|
||||
return usr.HasRoles(model.RolesScriptWrite)
|
||||
}
|
||||
/// Not implemented
|
||||
func (usr UserInfoHandle) HasRoleMaptest() (bool, error) {
|
||||
|
||||
105
pkg/web_api/stats.go
Normal file
105
pkg/web_api/stats.go
Normal file
@@ -0,0 +1,105 @@
|
||||
package web_api
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/api"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/datastore"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/model"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/service"
|
||||
)
|
||||
|
||||
// GetStats returns aggregate submission and mapfix counts for the public
// stats endpoint. Each count issues a List...WithTotal call with Size: 0 so
// only the total is computed and no rows are materialized. The six queries
// run sequentially; counts may be slightly inconsistent with each other if
// data changes between them.
//
// GET /stats
func (svc *Service) GetStats(ctx context.Context) (*api.Stats, error) {
	// Get total submissions count (no filter)
	totalSubmissions, _, err := svc.inner.ListSubmissionsWithTotal(ctx, service.NewSubmissionFilter(), model.Page{
		Number: 1,
		Size: 0, // We only want the count, not the items
	}, datastore.ListSortDisabled)
	if err != nil {
		return nil, err
	}

	// Get total mapfixes count (no filter)
	totalMapfixes, _, err := svc.inner.ListMapfixesWithTotal(ctx, service.NewMapfixFilter(), model.Page{
		Number: 1,
		Size: 0, // We only want the count, not the items
	}, datastore.ListSortDisabled)
	if err != nil {
		return nil, err
	}

	// Get released submissions count
	releasedSubmissionsFilter := service.NewSubmissionFilter()
	releasedSubmissionsFilter.SetStatuses([]model.SubmissionStatus{model.SubmissionStatusReleased})
	releasedSubmissions, _, err := svc.inner.ListSubmissionsWithTotal(ctx, releasedSubmissionsFilter, model.Page{
		Number: 1,
		Size: 0,
	}, datastore.ListSortDisabled)
	if err != nil {
		return nil, err
	}

	// Get released mapfixes count
	releasedMapfixesFilter := service.NewMapfixFilter()
	releasedMapfixesFilter.SetStatuses([]model.MapfixStatus{model.MapfixStatusReleased})
	releasedMapfixes, _, err := svc.inner.ListMapfixesWithTotal(ctx, releasedMapfixesFilter, model.Page{
		Number: 1,
		Size: 0,
	}, datastore.ListSortDisabled)
	if err != nil {
		return nil, err
	}

	// Get submitted submissions count: everything in the pipeline (under
	// construction through uploaded), i.e. not yet Released
	submittedSubmissionsFilter := service.NewSubmissionFilter()
	submittedSubmissionsFilter.SetStatuses([]model.SubmissionStatus{
		model.SubmissionStatusUnderConstruction,
		model.SubmissionStatusChangesRequested,
		model.SubmissionStatusSubmitting,
		model.SubmissionStatusSubmitted,
		model.SubmissionStatusAcceptedUnvalidated,
		model.SubmissionStatusValidating,
		model.SubmissionStatusValidated,
		model.SubmissionStatusUploading,
		model.SubmissionStatusUploaded,
	})
	submittedSubmissions, _, err := svc.inner.ListSubmissionsWithTotal(ctx, submittedSubmissionsFilter, model.Page{
		Number: 1,
		Size: 0,
	}, datastore.ListSortDisabled)
	if err != nil {
		return nil, err
	}

	// Get submitted mapfixes count: everything in the pipeline, including the
	// extra Releasing stage that mapfixes have
	submittedMapfixesFilter := service.NewMapfixFilter()
	submittedMapfixesFilter.SetStatuses([]model.MapfixStatus{
		model.MapfixStatusUnderConstruction,
		model.MapfixStatusChangesRequested,
		model.MapfixStatusSubmitting,
		model.MapfixStatusSubmitted,
		model.MapfixStatusAcceptedUnvalidated,
		model.MapfixStatusValidating,
		model.MapfixStatusValidated,
		model.MapfixStatusUploading,
		model.MapfixStatusUploaded,
		model.MapfixStatusReleasing,
	})
	submittedMapfixes, _, err := svc.inner.ListMapfixesWithTotal(ctx, submittedMapfixesFilter, model.Page{
		Number: 1,
		Size: 0,
	}, datastore.ListSortDisabled)
	if err != nil {
		return nil, err
	}

	return &api.Stats{
		TotalSubmissions: totalSubmissions,
		TotalMapfixes: totalMapfixes,
		ReleasedSubmissions: releasedSubmissions,
		ReleasedMapfixes: releasedMapfixes,
		SubmittedSubmissions: submittedSubmissions,
		SubmittedMapfixes: submittedMapfixes,
	}, nil
}
|
||||
@@ -20,13 +20,6 @@ var(
|
||||
model.SubmissionStatusSubmitted,
|
||||
model.SubmissionStatusUnderConstruction,
|
||||
}
|
||||
// limit submissions in the pipeline to one per target map
|
||||
ActiveAcceptedSubmissionStatuses = []model.SubmissionStatus{
|
||||
model.SubmissionStatusUploading,
|
||||
model.SubmissionStatusValidated,
|
||||
model.SubmissionStatusValidating,
|
||||
model.SubmissionStatusAcceptedUnvalidated,
|
||||
}
|
||||
// Allow 5 submissions every 10 minutes
|
||||
CreateSubmissionRateLimit int64 = 5
|
||||
CreateSubmissionRecencyWindow = time.Second*600
|
||||
@@ -236,6 +229,9 @@ func (svc *Service) ListSubmissions(ctx context.Context, params api.ListSubmissi
|
||||
if asset_id, asset_id_ok := params.AssetID.Get(); asset_id_ok{
|
||||
filter.SetAssetID(uint64(asset_id))
|
||||
}
|
||||
if asset_version, asset_version_ok := params.AssetVersion.Get(); asset_version_ok{
|
||||
filter.SetAssetVersion(uint64(asset_version))
|
||||
}
|
||||
if uploaded_asset_id, uploaded_asset_id_ok := params.UploadedAssetID.Get(); uploaded_asset_id_ok{
|
||||
filter.SetUploadedAssetID(uint64(uploaded_asset_id))
|
||||
}
|
||||
@@ -441,7 +437,12 @@ func (svc *Service) ActionSubmissionRequestChanges(ctx context.Context, params a
|
||||
target_status := model.SubmissionStatusChangesRequested
|
||||
update := service.NewSubmissionUpdate()
|
||||
update.SetStatusID(target_status)
|
||||
allowed_statuses := []model.SubmissionStatus{model.SubmissionStatusValidated, model.SubmissionStatusAcceptedUnvalidated, model.SubmissionStatusSubmitted}
|
||||
allowed_statuses := []model.SubmissionStatus{
|
||||
model.SubmissionStatusUploaded,
|
||||
model.SubmissionStatusValidated,
|
||||
model.SubmissionStatusAcceptedUnvalidated,
|
||||
model.SubmissionStatusSubmitted,
|
||||
}
|
||||
err = svc.inner.UpdateSubmissionIfStatus(ctx, params.SubmissionID, allowed_statuses, update)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -555,7 +556,11 @@ func (svc *Service) ActionSubmissionTriggerSubmit(ctx context.Context, params ap
|
||||
target_status := model.SubmissionStatusSubmitting
|
||||
update := service.NewSubmissionUpdate()
|
||||
update.SetStatusID(target_status)
|
||||
allowed_statuses := []model.SubmissionStatus{model.SubmissionStatusUnderConstruction, model.SubmissionStatusChangesRequested}
|
||||
allowed_statuses := []model.SubmissionStatus{
|
||||
model.SubmissionStatusUnderConstruction,
|
||||
model.SubmissionStatusChangesRequested,
|
||||
model.SubmissionStatusSubmitted,
|
||||
}
|
||||
err = svc.inner.UpdateSubmissionIfStatus(ctx, params.SubmissionID, allowed_statuses, update)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -1038,19 +1043,24 @@ func (svc *Service) ActionSubmissionAccepted(ctx context.Context, params api.Act
|
||||
// Release a set of uploaded maps.
|
||||
//
|
||||
// POST /release-submissions
|
||||
func (svc *Service) ReleaseSubmissions(ctx context.Context, request []api.ReleaseInfo) error {
|
||||
func (svc *Service) ReleaseSubmissions(ctx context.Context, request []api.ReleaseInfo) (*api.OperationID, error) {
|
||||
userInfo, ok := ctx.Value("UserInfo").(UserInfoHandle)
|
||||
if !ok {
|
||||
return ErrUserInfo
|
||||
return nil, ErrUserInfo
|
||||
}
|
||||
|
||||
has_role, err := userInfo.HasRoleSubmissionRelease()
|
||||
if err != nil {
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
// check if caller has required role
|
||||
if !has_role {
|
||||
return ErrPermissionDeniedNeedRoleSubmissionRelease
|
||||
return nil, ErrPermissionDeniedNeedRoleSubmissionRelease
|
||||
}
|
||||
|
||||
userId, err := userInfo.GetUserID()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
idList := make([]int64, len(request))
|
||||
@@ -1061,48 +1071,62 @@ func (svc *Service) ReleaseSubmissions(ctx context.Context, request []api.Releas
|
||||
// fetch submissions
|
||||
submissions, err := svc.inner.GetSubmissionList(ctx, idList)
|
||||
if err != nil {
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// the submissions are not ordered the same as the idList!
|
||||
id_to_submission := make(map[int64]*model.Submission, len(submissions))
|
||||
|
||||
// check each submission to make sure it is ready to release
|
||||
for _,submission := range submissions{
|
||||
if submission.StatusID != model.SubmissionStatusUploaded{
|
||||
return ErrReleaseInvalidStatus
|
||||
return nil, ErrReleaseInvalidStatus
|
||||
}
|
||||
if submission.UploadedAssetID == 0{
|
||||
return ErrReleaseNoUploadedAssetID
|
||||
return nil, ErrReleaseNoUploadedAssetID
|
||||
}
|
||||
id_to_submission[submission.ID] = &submission
|
||||
}
|
||||
|
||||
for i,submission := range submissions{
|
||||
date := request[i].Date.Unix()
|
||||
// create each map with go-grpc
|
||||
_, err := svc.inner.CreateMap(ctx, model.Map{
|
||||
ID: int64(submission.UploadedAssetID),
|
||||
DisplayName: submission.DisplayName,
|
||||
Creator: submission.Creator,
|
||||
GameID: submission.GameID,
|
||||
Date: time.Unix(date, 0),
|
||||
Submitter: submission.Submitter,
|
||||
// Thumbnail: 0,
|
||||
// AssetVersion: 0,
|
||||
// LoadCount: 0,
|
||||
// Modes: 0,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// update each status to Released
|
||||
update := service.NewSubmissionUpdate()
|
||||
update.SetStatusID(model.SubmissionStatusReleased)
|
||||
err = svc.inner.UpdateSubmissionIfStatus(ctx, submission.ID, []model.SubmissionStatus{model.SubmissionStatusUploaded}, update)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// construct batch release nats message
|
||||
release_submissions := make([]model.ReleaseSubmissionRequest, len(request))
|
||||
for i, release_info := range request {
|
||||
// from request
|
||||
release_submissions[i].ReleaseDate = release_info.Date.Unix()
|
||||
release_submissions[i].SubmissionID = release_info.SubmissionID
|
||||
submission := id_to_submission[release_info.SubmissionID]
|
||||
// from submission
|
||||
release_submissions[i].ModelID = submission.ValidatedAssetID
|
||||
release_submissions[i].ModelVersion = submission.ValidatedAssetVersion
|
||||
// for map create
|
||||
release_submissions[i].UploadedAssetID = submission.UploadedAssetID
|
||||
release_submissions[i].DisplayName = submission.DisplayName
|
||||
release_submissions[i].Creator = submission.Creator
|
||||
release_submissions[i].GameID = submission.GameID
|
||||
release_submissions[i].Submitter = submission.Submitter
|
||||
}
|
||||
|
||||
return nil
|
||||
// create a trackable long-running operation
|
||||
operation, err := svc.inner.CreateOperation(ctx, model.Operation{
|
||||
Owner: userId,
|
||||
StatusID: model.OperationStatusCreated,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// this is a map fix
|
||||
err = svc.inner.NatsBatchReleaseSubmissions(
|
||||
release_submissions,
|
||||
operation.ID,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &api.OperationID{
|
||||
OperationID: operation.ID,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// CreateSubmissionAuditComment implements createSubmissionAuditComment operation.
|
||||
|
||||
135
pkg/web_api/thumbnails.go
Normal file
135
pkg/web_api/thumbnails.go
Normal file
@@ -0,0 +1,135 @@
|
||||
package web_api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strconv"
|
||||
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/api"
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/roblox"
|
||||
)
|
||||
|
||||
// BatchAssetThumbnails handles batch fetching of asset thumbnails
|
||||
func (svc *Service) BatchAssetThumbnails(ctx context.Context, req *api.BatchAssetThumbnailsReq) (*api.BatchAssetThumbnailsOK, error) {
|
||||
if len(req.AssetIds) == 0 {
|
||||
return &api.BatchAssetThumbnailsOK{
|
||||
Thumbnails: api.NewOptBatchAssetThumbnailsOKThumbnails(map[string]string{}),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Convert size string to enum
|
||||
size := roblox.Size420x420
|
||||
if req.Size.IsSet() {
|
||||
sizeStr := req.Size.Value
|
||||
switch api.BatchAssetThumbnailsReqSize(sizeStr) {
|
||||
case api.BatchAssetThumbnailsReqSize150x150:
|
||||
size = roblox.Size150x150
|
||||
case api.BatchAssetThumbnailsReqSize768x432:
|
||||
size = roblox.Size768x432
|
||||
}
|
||||
}
|
||||
|
||||
// Fetch thumbnails from service
|
||||
thumbnails, err := svc.inner.GetAssetThumbnails(ctx, req.AssetIds, size)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Convert map[uint64]string to map[string]string for JSON
|
||||
result := make(map[string]string, len(thumbnails))
|
||||
for assetID, url := range thumbnails {
|
||||
result[strconv.FormatUint(assetID, 10)] = url
|
||||
}
|
||||
|
||||
return &api.BatchAssetThumbnailsOK{
|
||||
Thumbnails: api.NewOptBatchAssetThumbnailsOKThumbnails(result),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// GetAssetThumbnail handles single asset thumbnail fetch (with redirect)
|
||||
func (svc *Service) GetAssetThumbnail(ctx context.Context, params api.GetAssetThumbnailParams) (*api.GetAssetThumbnailFound, error) {
|
||||
// Convert size string to enum
|
||||
size := roblox.Size420x420
|
||||
if params.Size.IsSet() {
|
||||
sizeStr := params.Size.Value
|
||||
switch api.GetAssetThumbnailSize(sizeStr) {
|
||||
case api.GetAssetThumbnailSize150x150:
|
||||
size = roblox.Size150x150
|
||||
case api.GetAssetThumbnailSize768x432:
|
||||
size = roblox.Size768x432
|
||||
}
|
||||
}
|
||||
|
||||
// Fetch thumbnail
|
||||
thumbnailURL, err := svc.inner.GetSingleAssetThumbnail(ctx, params.AssetID, size)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Return redirect response
|
||||
return &api.GetAssetThumbnailFound{
|
||||
Location: api.NewOptString(thumbnailURL),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// BatchUserThumbnails handles batch fetching of user avatar thumbnails
|
||||
func (svc *Service) BatchUserThumbnails(ctx context.Context, req *api.BatchUserThumbnailsReq) (*api.BatchUserThumbnailsOK, error) {
|
||||
if len(req.UserIds) == 0 {
|
||||
return &api.BatchUserThumbnailsOK{
|
||||
Thumbnails: api.NewOptBatchUserThumbnailsOKThumbnails(map[string]string{}),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Convert size string to enum
|
||||
size := roblox.Size150x150
|
||||
if req.Size.IsSet() {
|
||||
sizeStr := req.Size.Value
|
||||
switch api.BatchUserThumbnailsReqSize(sizeStr) {
|
||||
case api.BatchUserThumbnailsReqSize420x420:
|
||||
size = roblox.Size420x420
|
||||
case api.BatchUserThumbnailsReqSize768x432:
|
||||
size = roblox.Size768x432
|
||||
}
|
||||
}
|
||||
|
||||
// Fetch thumbnails from service
|
||||
thumbnails, err := svc.inner.GetUserAvatarThumbnails(ctx, req.UserIds, size)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Convert map[uint64]string to map[string]string for JSON
|
||||
result := make(map[string]string, len(thumbnails))
|
||||
for userID, url := range thumbnails {
|
||||
result[strconv.FormatUint(userID, 10)] = url
|
||||
}
|
||||
|
||||
return &api.BatchUserThumbnailsOK{
|
||||
Thumbnails: api.NewOptBatchUserThumbnailsOKThumbnails(result),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// GetUserThumbnail handles single user avatar thumbnail fetch (with redirect)
|
||||
func (svc *Service) GetUserThumbnail(ctx context.Context, params api.GetUserThumbnailParams) (*api.GetUserThumbnailFound, error) {
|
||||
// Convert size string to enum
|
||||
size := roblox.Size150x150
|
||||
if params.Size.IsSet() {
|
||||
sizeStr := params.Size.Value
|
||||
switch api.GetUserThumbnailSize(sizeStr) {
|
||||
case api.GetUserThumbnailSize420x420:
|
||||
size = roblox.Size420x420
|
||||
case api.GetUserThumbnailSize768x432:
|
||||
size = roblox.Size768x432
|
||||
}
|
||||
}
|
||||
|
||||
// Fetch thumbnail
|
||||
thumbnailURL, err := svc.inner.GetSingleUserAvatarThumbnail(ctx, params.UserID, size)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Return redirect response
|
||||
return &api.GetUserThumbnailFound{
|
||||
Location: api.NewOptString(thumbnailURL),
|
||||
}, nil
|
||||
}
|
||||
33
pkg/web_api/users.go
Normal file
33
pkg/web_api/users.go
Normal file
@@ -0,0 +1,33 @@
|
||||
package web_api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strconv"
|
||||
|
||||
"git.itzana.me/strafesnet/maps-service/pkg/api"
|
||||
)
|
||||
|
||||
// BatchUsernames handles batch fetching of usernames
|
||||
func (svc *Service) BatchUsernames(ctx context.Context, req *api.BatchUsernamesReq) (*api.BatchUsernamesOK, error) {
|
||||
if len(req.UserIds) == 0 {
|
||||
return &api.BatchUsernamesOK{
|
||||
Usernames: api.NewOptBatchUsernamesOKUsernames(map[string]string{}),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Fetch usernames from service
|
||||
usernames, err := svc.inner.GetUsernames(ctx, req.UserIds)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Convert map[uint64]string to map[string]string for JSON
|
||||
result := make(map[string]string, len(usernames))
|
||||
for userID, username := range usernames {
|
||||
result[strconv.FormatUint(userID, 10)] = username
|
||||
}
|
||||
|
||||
return &api.BatchUsernamesOK{
|
||||
Usernames: api.NewOptBatchUsernamesOKUsernames(result),
|
||||
}, nil
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "submissions-api"
|
||||
version = "0.8.2"
|
||||
version = "0.10.1"
|
||||
edition = "2024"
|
||||
publish = ["strafesnet"]
|
||||
repository = "https://git.itzana.me/StrafesNET/maps-service"
|
||||
@@ -152,6 +152,48 @@ impl Context{
|
||||
|
||||
Ok(())
|
||||
}
|
||||
pub async fn get_mapfixes(&self,config:GetMapfixesRequest<'_>)->Result<MapfixesResponse,Error>{
|
||||
let url_raw=format!("{}/mapfixes",self.0.base_url);
|
||||
let mut url=reqwest::Url::parse(url_raw.as_str()).map_err(Error::Parse)?;
|
||||
|
||||
{
|
||||
let mut query_pairs=url.query_pairs_mut();
|
||||
query_pairs.append_pair("Page",config.Page.to_string().as_str());
|
||||
query_pairs.append_pair("Limit",config.Limit.to_string().as_str());
|
||||
if let Some(sort)=config.Sort{
|
||||
query_pairs.append_pair("Sort",(sort as u8).to_string().as_str());
|
||||
}
|
||||
if let Some(display_name)=config.DisplayName{
|
||||
query_pairs.append_pair("DisplayName",display_name);
|
||||
}
|
||||
if let Some(creator)=config.Creator{
|
||||
query_pairs.append_pair("Creator",creator);
|
||||
}
|
||||
if let Some(game_id)=config.GameID{
|
||||
query_pairs.append_pair("GameID",(game_id as u8).to_string().as_str());
|
||||
}
|
||||
if let Some(submitter)=config.Submitter{
|
||||
query_pairs.append_pair("Submitter",submitter.to_string().as_str());
|
||||
}
|
||||
if let Some(asset_id)=config.AssetID{
|
||||
query_pairs.append_pair("AssetID",asset_id.to_string().as_str());
|
||||
}
|
||||
if let Some(asset_version)=config.AssetVersion{
|
||||
query_pairs.append_pair("AssetVersion",asset_version.to_string().as_str());
|
||||
}
|
||||
if let Some(uploaded_asset_id)=config.TargetAssetID{
|
||||
query_pairs.append_pair("TargetAssetID",uploaded_asset_id.to_string().as_str());
|
||||
}
|
||||
if let Some(status_id)=config.StatusID{
|
||||
query_pairs.append_pair("StatusID",(status_id as u8).to_string().as_str());
|
||||
}
|
||||
}
|
||||
|
||||
response_ok(
|
||||
self.0.get(url).await.map_err(Error::Reqwest)?
|
||||
).await.map_err(Error::Response)?
|
||||
.json().await.map_err(Error::ReqwestJson)
|
||||
}
|
||||
pub async fn get_submissions(&self,config:GetSubmissionsRequest<'_>)->Result<SubmissionsResponse,Error>{
|
||||
let url_raw=format!("{}/submissions",self.0.base_url);
|
||||
let mut url=reqwest::Url::parse(url_raw.as_str()).map_err(Error::Parse)?;
|
||||
@@ -178,6 +220,9 @@ impl Context{
|
||||
if let Some(asset_id)=config.AssetID{
|
||||
query_pairs.append_pair("AssetID",asset_id.to_string().as_str());
|
||||
}
|
||||
if let Some(asset_version)=config.AssetVersion{
|
||||
query_pairs.append_pair("AssetVersion",asset_version.to_string().as_str());
|
||||
}
|
||||
if let Some(uploaded_asset_id)=config.UploadedAssetID{
|
||||
query_pairs.append_pair("UploadedAssetID",uploaded_asset_id.to_string().as_str());
|
||||
}
|
||||
@@ -218,7 +263,37 @@ impl Context{
|
||||
).await.map_err(Error::Response)?
|
||||
.json().await.map_err(Error::ReqwestJson)
|
||||
}
|
||||
pub async fn release_submissions(&self,config:ReleaseRequest<'_>)->Result<(),Error>{
|
||||
pub async fn get_mapfix_audit_events(&self,config:GetMapfixAuditEventsRequest)->Result<Vec<AuditEventReponse>,Error>{
|
||||
let url_raw=format!("{}/mapfixes/{}/audit-events",self.0.base_url,config.MapfixID);
|
||||
let mut url=reqwest::Url::parse(url_raw.as_str()).map_err(Error::Parse)?;
|
||||
|
||||
{
|
||||
let mut query_pairs=url.query_pairs_mut();
|
||||
query_pairs.append_pair("Page",config.Page.to_string().as_str());
|
||||
query_pairs.append_pair("Limit",config.Limit.to_string().as_str());
|
||||
}
|
||||
|
||||
response_ok(
|
||||
self.0.get(url).await.map_err(Error::Reqwest)?
|
||||
).await.map_err(Error::Response)?
|
||||
.json().await.map_err(Error::ReqwestJson)
|
||||
}
|
||||
pub async fn get_submission_audit_events(&self,config:GetSubmissionAuditEventsRequest)->Result<Vec<AuditEventReponse>,Error>{
|
||||
let url_raw=format!("{}/submissions/{}/audit-events",self.0.base_url,config.SubmissionID);
|
||||
let mut url=reqwest::Url::parse(url_raw.as_str()).map_err(Error::Parse)?;
|
||||
|
||||
{
|
||||
let mut query_pairs=url.query_pairs_mut();
|
||||
query_pairs.append_pair("Page",config.Page.to_string().as_str());
|
||||
query_pairs.append_pair("Limit",config.Limit.to_string().as_str());
|
||||
}
|
||||
|
||||
response_ok(
|
||||
self.0.get(url).await.map_err(Error::Reqwest)?
|
||||
).await.map_err(Error::Response)?
|
||||
.json().await.map_err(Error::ReqwestJson)
|
||||
}
|
||||
pub async fn release_submissions(&self,config:ReleaseRequest<'_>)->Result<OperationIDResponse,Error>{
|
||||
let url_raw=format!("{}/release-submissions",self.0.base_url);
|
||||
let url=reqwest::Url::parse(url_raw.as_str()).map_err(Error::Parse)?;
|
||||
|
||||
@@ -226,8 +301,7 @@ impl Context{
|
||||
|
||||
response_ok(
|
||||
self.0.post(url,body).await.map_err(Error::Reqwest)?
|
||||
).await.map_err(Error::Response)?;
|
||||
|
||||
Ok(())
|
||||
).await.map_err(Error::Response)?
|
||||
.json().await.map_err(Error::ReqwestJson)
|
||||
}
|
||||
}
|
||||
@@ -30,7 +30,6 @@ impl<Items> std::error::Error for SingleItemError<Items> where Items:std::fmt::D
|
||||
pub type ScriptSingleItemError=SingleItemError<Vec<ScriptID>>;
|
||||
pub type ScriptPolicySingleItemError=SingleItemError<Vec<ScriptPolicyID>>;
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub struct UrlAndBody{
|
||||
pub url:url::Url,
|
||||
@@ -76,7 +75,7 @@ pub enum GameID{
|
||||
FlyTrials=5,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Serialize)]
|
||||
pub struct CreateMapfixRequest<'a>{
|
||||
pub OperationID:OperationID,
|
||||
@@ -89,13 +88,13 @@ pub struct CreateMapfixRequest<'a>{
|
||||
pub TargetAssetID:u64,
|
||||
pub Description:&'a str,
|
||||
}
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct MapfixIDResponse{
|
||||
pub MapfixID:MapfixID,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Serialize)]
|
||||
pub struct CreateSubmissionRequest<'a>{
|
||||
pub OperationID:OperationID,
|
||||
@@ -108,7 +107,7 @@ pub struct CreateSubmissionRequest<'a>{
|
||||
pub Status:u32,
|
||||
pub Roles:u32,
|
||||
}
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct SubmissionIDResponse{
|
||||
pub SubmissionID:SubmissionID,
|
||||
@@ -127,11 +126,11 @@ pub enum ResourceType{
|
||||
Submission=2,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
pub struct GetScriptRequest{
|
||||
pub ScriptID:ScriptID,
|
||||
}
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Serialize)]
|
||||
pub struct GetScriptsRequest<'a>{
|
||||
pub Page:u32,
|
||||
@@ -151,7 +150,7 @@ pub struct GetScriptsRequest<'a>{
|
||||
pub struct HashRequest<'a>{
|
||||
pub hash:&'a str,
|
||||
}
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct ScriptResponse{
|
||||
pub ID:ScriptID,
|
||||
@@ -161,7 +160,7 @@ pub struct ScriptResponse{
|
||||
pub ResourceType:ResourceType,
|
||||
pub ResourceID:ResourceID,
|
||||
}
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Serialize)]
|
||||
pub struct CreateScriptRequest<'a>{
|
||||
pub Name:&'a str,
|
||||
@@ -170,7 +169,7 @@ pub struct CreateScriptRequest<'a>{
|
||||
#[serde(skip_serializing_if="Option::is_none")]
|
||||
pub ResourceID:Option<ResourceID>,
|
||||
}
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct ScriptIDResponse{
|
||||
pub ScriptID:ScriptID,
|
||||
@@ -186,11 +185,11 @@ pub enum Policy{
|
||||
Replace=4,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
pub struct GetScriptPolicyRequest{
|
||||
pub ScriptPolicyID:ScriptPolicyID,
|
||||
}
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Serialize)]
|
||||
pub struct GetScriptPoliciesRequest<'a>{
|
||||
pub Page:u32,
|
||||
@@ -202,7 +201,7 @@ pub struct GetScriptPoliciesRequest<'a>{
|
||||
#[serde(skip_serializing_if="Option::is_none")]
|
||||
pub Policy:Option<Policy>,
|
||||
}
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct ScriptPolicyResponse{
|
||||
pub ID:ScriptPolicyID,
|
||||
@@ -210,20 +209,20 @@ pub struct ScriptPolicyResponse{
|
||||
pub ToScriptID:ScriptID,
|
||||
pub Policy:Policy
|
||||
}
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Serialize)]
|
||||
pub struct CreateScriptPolicyRequest{
|
||||
pub FromScriptID:ScriptID,
|
||||
pub ToScriptID:ScriptID,
|
||||
pub Policy:Policy,
|
||||
}
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct ScriptPolicyIDResponse{
|
||||
pub ScriptPolicyID:ScriptPolicyID,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Serialize)]
|
||||
pub struct UpdateScriptPolicyRequest{
|
||||
pub ID:ScriptPolicyID,
|
||||
@@ -235,7 +234,7 @@ pub struct UpdateScriptPolicyRequest{
|
||||
pub Policy:Option<Policy>,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct UpdateSubmissionModelRequest{
|
||||
pub SubmissionID:SubmissionID,
|
||||
@@ -252,6 +251,73 @@ pub enum Sort{
|
||||
DateDescending=4,
|
||||
}
|
||||
|
||||
#[derive(Clone,Debug,serde_repr::Serialize_repr,serde_repr::Deserialize_repr)]
|
||||
#[repr(u8)]
|
||||
pub enum MapfixStatus{
|
||||
// Phase: Creation
|
||||
UnderConstruction=0,
|
||||
ChangesRequested=1,
|
||||
|
||||
// Phase: Review
|
||||
Submitting=2,
|
||||
Submitted=3,
|
||||
|
||||
// Phase: Testing
|
||||
AcceptedUnvalidated=4, // pending script review, can re-trigger validation
|
||||
Validating=5,
|
||||
Validated=6,
|
||||
Uploading=7,
|
||||
Uploaded=8, // uploaded to the group, but pending release
|
||||
Releasing=11,
|
||||
|
||||
// Phase: Final MapfixStatus
|
||||
Rejected=9,
|
||||
Released=10,
|
||||
}
|
||||
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct GetMapfixesRequest<'a>{
|
||||
pub Page:u32,
|
||||
pub Limit:u32,
|
||||
pub Sort:Option<Sort>,
|
||||
pub DisplayName:Option<&'a str>,
|
||||
pub Creator:Option<&'a str>,
|
||||
pub GameID:Option<GameID>,
|
||||
pub Submitter:Option<u64>,
|
||||
pub AssetID:Option<u64>,
|
||||
pub AssetVersion:Option<u64>,
|
||||
pub TargetAssetID:Option<u64>,
|
||||
pub StatusID:Option<MapfixStatus>,
|
||||
}
|
||||
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Serialize,serde::Deserialize)]
|
||||
pub struct MapfixResponse{
|
||||
pub ID:MapfixID,
|
||||
pub DisplayName:String,
|
||||
pub Creator:String,
|
||||
pub GameID:u32,
|
||||
pub CreatedAt:i64,
|
||||
pub UpdatedAt:i64,
|
||||
pub Submitter:u64,
|
||||
pub AssetID:u64,
|
||||
pub AssetVersion:u64,
|
||||
pub ValidatedAssetID:Option<u64>,
|
||||
pub ValidatedAssetVersion:Option<u64>,
|
||||
pub Completed:bool,
|
||||
pub TargetAssetID:u64,
|
||||
pub StatusID:MapfixStatus,
|
||||
pub Description:String,
|
||||
}
|
||||
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct MapfixesResponse{
|
||||
pub Total:u64,
|
||||
pub Mapfixes:Vec<MapfixResponse>,
|
||||
}
|
||||
|
||||
#[derive(Clone,Debug,serde_repr::Deserialize_repr)]
|
||||
#[repr(u8)]
|
||||
pub enum SubmissionStatus{
|
||||
@@ -275,7 +341,7 @@ pub enum SubmissionStatus{
|
||||
Released=10,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct GetSubmissionsRequest<'a>{
|
||||
pub Page:u32,
|
||||
@@ -286,11 +352,12 @@ pub struct GetSubmissionsRequest<'a>{
|
||||
pub GameID:Option<GameID>,
|
||||
pub Submitter:Option<u64>,
|
||||
pub AssetID:Option<u64>,
|
||||
pub AssetVersion:Option<u64>,
|
||||
pub UploadedAssetID:Option<u64>,
|
||||
pub StatusID:Option<SubmissionStatus>,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct SubmissionResponse{
|
||||
pub ID:SubmissionID,
|
||||
@@ -302,18 +369,20 @@ pub struct SubmissionResponse{
|
||||
pub Submitter:u64,
|
||||
pub AssetID:u64,
|
||||
pub AssetVersion:u64,
|
||||
pub ValidatedAssetID:Option<u64>,
|
||||
pub ValidatedAssetVersion:Option<u64>,
|
||||
pub UploadedAssetID:u64,
|
||||
pub StatusID:SubmissionStatus,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct SubmissionsResponse{
|
||||
pub Total:u64,
|
||||
pub Submissions:Vec<SubmissionResponse>,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct GetMapsRequest<'a>{
|
||||
pub Page:u32,
|
||||
@@ -324,7 +393,7 @@ pub struct GetMapsRequest<'a>{
|
||||
pub GameID:Option<GameID>,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct MapResponse{
|
||||
pub ID:i64,
|
||||
@@ -334,7 +403,119 @@ pub struct MapResponse{
|
||||
pub Date:i64,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct GetMapfixAuditEventsRequest{
|
||||
pub Page:u32,
|
||||
pub Limit:u32,
|
||||
pub MapfixID:i64,
|
||||
}
|
||||
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct GetSubmissionAuditEventsRequest{
|
||||
pub Page:u32,
|
||||
pub Limit:u32,
|
||||
pub SubmissionID:i64,
|
||||
}
|
||||
|
||||
#[derive(Clone,Debug,serde_repr::Deserialize_repr)]
|
||||
#[repr(u32)]
|
||||
pub enum AuditEventType{
|
||||
Action=0,
|
||||
Comment=1,
|
||||
ChangeModel=2,
|
||||
ChangeValidatedModel=3,
|
||||
ChangeDisplayName=4,
|
||||
ChangeCreator=5,
|
||||
Error=6,
|
||||
CheckList=7,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct AuditEventAction{
|
||||
pub target_status:MapfixStatus,
|
||||
}
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct AuditEventComment{
|
||||
pub comment:String,
|
||||
}
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct AuditEventChangeModel{
|
||||
pub old_model_id:u64,
|
||||
pub old_model_version:u64,
|
||||
pub new_model_id:u64,
|
||||
pub new_model_version:u64,
|
||||
}
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct AuditEventChangeValidatedModel{
|
||||
pub validated_model_id:u64,
|
||||
pub validated_model_version:u64,
|
||||
}
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct AuditEventChangeName{
|
||||
pub old_name:String,
|
||||
pub new_name:String,
|
||||
}
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct AuditEventError{
|
||||
pub error:String,
|
||||
}
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct AuditEventCheck{
|
||||
pub name:String,
|
||||
pub summary:String,
|
||||
pub passed:bool,
|
||||
}
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct AuditEventCheckList{
|
||||
pub check_list:Vec<AuditEventCheck>,
|
||||
}
|
||||
|
||||
#[derive(Clone,Debug)]
|
||||
pub enum AuditEventData{
|
||||
Action(AuditEventAction),
|
||||
Comment(AuditEventComment),
|
||||
ChangeModel(AuditEventChangeModel),
|
||||
ChangeValidatedModel(AuditEventChangeValidatedModel),
|
||||
ChangeDisplayName(AuditEventChangeName),
|
||||
ChangeCreator(AuditEventChangeName),
|
||||
Error(AuditEventError),
|
||||
CheckList(AuditEventCheckList),
|
||||
}
|
||||
|
||||
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq,serde::Serialize,serde::Deserialize)]
|
||||
pub struct AuditEventID(pub(crate)i64);
|
||||
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct AuditEventReponse{
|
||||
pub ID:AuditEventID,
|
||||
pub Date:i64,
|
||||
pub User:u64,
|
||||
pub Username:String,
|
||||
pub ResourceType:ResourceType,
|
||||
pub ResourceID:ResourceID,
|
||||
pub EventType:AuditEventType,
|
||||
EventData:serde_json::Value,
|
||||
}
|
||||
impl AuditEventReponse{
|
||||
pub fn data(self)->serde_json::Result<AuditEventData>{
|
||||
Ok(match self.EventType{
|
||||
AuditEventType::Action=>AuditEventData::Action(serde_json::from_value(self.EventData)?),
|
||||
AuditEventType::Comment=>AuditEventData::Comment(serde_json::from_value(self.EventData)?),
|
||||
AuditEventType::ChangeModel=>AuditEventData::ChangeModel(serde_json::from_value(self.EventData)?),
|
||||
AuditEventType::ChangeValidatedModel=>AuditEventData::ChangeValidatedModel(serde_json::from_value(self.EventData)?),
|
||||
AuditEventType::ChangeDisplayName=>AuditEventData::ChangeDisplayName(serde_json::from_value(self.EventData)?),
|
||||
AuditEventType::ChangeCreator=>AuditEventData::ChangeCreator(serde_json::from_value(self.EventData)?),
|
||||
AuditEventType::Error=>AuditEventData::Error(serde_json::from_value(self.EventData)?),
|
||||
AuditEventType::CheckList=>AuditEventData::CheckList(serde_json::from_value(self.EventData)?),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Serialize)]
|
||||
pub struct Check{
|
||||
pub Name:&'static str,
|
||||
@@ -342,7 +523,7 @@ pub struct Check{
|
||||
pub Passed:bool,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct ActionSubmissionSubmittedRequest{
|
||||
pub SubmissionID:SubmissionID,
|
||||
@@ -352,33 +533,33 @@ pub struct ActionSubmissionSubmittedRequest{
|
||||
pub GameID:GameID,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct ActionSubmissionRequestChangesRequest{
|
||||
pub SubmissionID:SubmissionID,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct ActionSubmissionUploadedRequest{
|
||||
pub SubmissionID:SubmissionID,
|
||||
pub UploadedAssetID:u64,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct ActionSubmissionAcceptedRequest{
|
||||
pub SubmissionID:SubmissionID,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct CreateSubmissionAuditErrorRequest{
|
||||
pub SubmissionID:SubmissionID,
|
||||
pub ErrorMessage:String,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct CreateSubmissionAuditCheckListRequest<'a>{
|
||||
pub SubmissionID:SubmissionID,
|
||||
@@ -387,8 +568,16 @@ pub struct CreateSubmissionAuditCheckListRequest<'a>{
|
||||
|
||||
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq,serde::Serialize,serde::Deserialize)]
|
||||
pub struct SubmissionID(pub(crate)i64);
|
||||
impl SubmissionID{
|
||||
pub const fn new(value:i64)->Self{
|
||||
Self(value)
|
||||
}
|
||||
pub const fn value(&self)->i64{
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct UpdateMapfixModelRequest{
|
||||
pub MapfixID:MapfixID,
|
||||
@@ -396,7 +585,7 @@ pub struct UpdateMapfixModelRequest{
|
||||
pub ModelVersion:u64,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct ActionMapfixSubmittedRequest{
|
||||
pub MapfixID:MapfixID,
|
||||
@@ -406,32 +595,32 @@ pub struct ActionMapfixSubmittedRequest{
|
||||
pub GameID:GameID,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct ActionMapfixRequestChangesRequest{
|
||||
pub MapfixID:MapfixID,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct ActionMapfixUploadedRequest{
|
||||
pub MapfixID:MapfixID,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct ActionMapfixAcceptedRequest{
|
||||
pub MapfixID:MapfixID,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct CreateMapfixAuditErrorRequest{
|
||||
pub MapfixID:MapfixID,
|
||||
pub ErrorMessage:String,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct CreateMapfixAuditCheckListRequest<'a>{
|
||||
pub MapfixID:MapfixID,
|
||||
@@ -440,8 +629,16 @@ pub struct CreateMapfixAuditCheckListRequest<'a>{
|
||||
|
||||
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq,serde::Serialize,serde::Deserialize)]
|
||||
pub struct MapfixID(pub(crate)i64);
|
||||
impl MapfixID{
|
||||
pub const fn new(value:i64)->Self{
|
||||
Self(value)
|
||||
}
|
||||
pub const fn value(&self)->i64{
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug)]
|
||||
pub struct ActionOperationFailedRequest{
|
||||
pub OperationID:OperationID,
|
||||
@@ -468,7 +665,7 @@ impl Resource{
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Serialize)]
|
||||
pub struct ReleaseInfo{
|
||||
pub SubmissionID:SubmissionID,
|
||||
@@ -478,3 +675,8 @@ pub struct ReleaseInfo{
|
||||
pub struct ReleaseRequest<'a>{
|
||||
pub schedule:&'a [ReleaseInfo],
|
||||
}
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(Clone,Debug,serde::Deserialize)]
|
||||
pub struct OperationIDResponse{
|
||||
pub OperationID:OperationID,
|
||||
}
|
||||
@@ -4,18 +4,18 @@ version = "0.1.1"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
async-nats = "0.42.0"
|
||||
async-nats = "0.45.0"
|
||||
futures = "0.3.31"
|
||||
rbx_asset = { version = "0.4.9", features = ["gzip", "rustls-tls"], default-features = false, registry = "strafesnet" }
|
||||
rbx_binary = "1.0.0"
|
||||
rbx_dom_weak = "3.0.0"
|
||||
rbx_reflection_database = "1.0.3"
|
||||
rbx_xml = "1.0.0"
|
||||
rbx_asset = { version = "0.5.0", features = ["gzip", "rustls-tls"], default-features = false, registry = "strafesnet" }
|
||||
rbx_binary = "2.0.0"
|
||||
rbx_dom_weak = "4.0.0"
|
||||
rbx_reflection_database = "2.0.1"
|
||||
rbx_xml = "2.0.0"
|
||||
regex = { version = "1.11.3", default-features = false }
|
||||
serde = { version = "1.0.215", features = ["derive"] }
|
||||
serde_json = "1.0.133"
|
||||
siphasher = "1.0.1"
|
||||
tokio = { version = "1.41.1", features = ["macros", "rt-multi-thread", "signal"] }
|
||||
heck = "0.5.0"
|
||||
lazy-regex = "3.4.1"
|
||||
rust-grpc = { version = "1.2.1", registry = "strafesnet" }
|
||||
tonic = "0.13.1"
|
||||
rust-grpc = { version = "1.6.1", registry = "strafesnet" }
|
||||
tonic = "0.14.1"
|
||||
|
||||
@@ -6,7 +6,7 @@ use heck::{ToSnakeCase,ToTitleCase};
|
||||
use rbx_dom_weak::Instance;
|
||||
use rust_grpc::validator::Check;
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
ModelInfoDownload(rbx_asset::cloud::GetError),
|
||||
@@ -24,7 +24,16 @@ impl std::fmt::Display for Error{
|
||||
}
|
||||
impl std::error::Error for Error{}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
macro_rules! lazy_regex{
|
||||
($r:literal)=>{{
|
||||
use regex::Regex;
|
||||
use std::sync::LazyLock;
|
||||
static RE:LazyLock<Regex>=LazyLock::new(||Regex::new($r).unwrap());
|
||||
&RE
|
||||
}};
|
||||
}
|
||||
|
||||
#[expect(nonstandard_style)]
|
||||
pub struct CheckRequest{
|
||||
ModelID:u64,
|
||||
SkipChecks:bool,
|
||||
@@ -47,12 +56,20 @@ impl From<crate::nats_types::CheckSubmissionRequest> for CheckRequest{
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq)]
|
||||
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq,Ord,PartialOrd)]
|
||||
struct ModeID(u64);
|
||||
impl ModeID{
|
||||
const MAIN:Self=Self(0);
|
||||
const BONUS:Self=Self(1);
|
||||
}
|
||||
impl std::fmt::Display for ModeID{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
match self{
|
||||
&ModeID::MAIN=>write!(f,"Main"),
|
||||
&ModeID(mode_id)=>write!(f,"Bonus{mode_id}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
enum Zone{
|
||||
Start,
|
||||
Finish,
|
||||
@@ -62,7 +79,7 @@ struct ModeElement{
|
||||
zone:Zone,
|
||||
mode_id:ModeID,
|
||||
}
|
||||
#[allow(dead_code)]
|
||||
#[expect(dead_code)]
|
||||
pub enum IDParseError{
|
||||
NoCaptures,
|
||||
ParseInt(core::num::ParseIntError),
|
||||
@@ -79,7 +96,7 @@ impl std::str::FromStr for ModeElement{
|
||||
"BonusFinish"=>Ok(Self{zone:Zone::Finish,mode_id:ModeID::BONUS}),
|
||||
"BonusAnticheat"=>Ok(Self{zone:Zone::Anticheat,mode_id:ModeID::BONUS}),
|
||||
other=>{
|
||||
let everything_pattern=lazy_regex::lazy_regex!(r"^Bonus(\d+)Start$|^BonusStart(\d+)$|^Bonus(\d+)Finish$|^BonusFinish(\d+)$|^Bonus(\d+)Anticheat$|^BonusAnticheat(\d+)$");
|
||||
let everything_pattern=lazy_regex!(r"^Bonus(\d+)Start$|^BonusStart(\d+)$|^Bonus(\d+)Finish$|^BonusFinish(\d+)$|^Bonus(\d+)Anticheat$|^BonusAnticheat(\d+)$");
|
||||
if let Some(captures)=everything_pattern.captures(other){
|
||||
if let Some(mode_id)=captures.get(1).or(captures.get(2)){
|
||||
return Ok(Self{
|
||||
@@ -139,16 +156,16 @@ impl std::str::FromStr for StageElement{
|
||||
type Err=IDParseError;
|
||||
fn from_str(s:&str)->Result<Self,Self::Err>{
|
||||
// Trigger ForceTrigger Teleport ForceTeleport SpawnAt ForceSpawnAt
|
||||
let bonus_start_pattern=lazy_regex::lazy_regex!(r"^(?:Force)?(Teleport|SpawnAt|Trigger)(\d+)$");
|
||||
if let Some(captures)=bonus_start_pattern.captures(s){
|
||||
let teleport_pattern=lazy_regex!(r"^(?:Force)?(Teleport|SpawnAt|Trigger)(\d+)$");
|
||||
if let Some(captures)=teleport_pattern.captures(s){
|
||||
return Ok(StageElement{
|
||||
behaviour:StageElementBehaviour::Teleport,
|
||||
stage_id:StageID(captures[1].parse().map_err(IDParseError::ParseInt)?),
|
||||
});
|
||||
}
|
||||
// Spawn
|
||||
let bonus_finish_pattern=lazy_regex::lazy_regex!(r"^Spawn(\d+)$");
|
||||
if let Some(captures)=bonus_finish_pattern.captures(s){
|
||||
let spawn_pattern=lazy_regex!(r"^Spawn(\d+)$");
|
||||
if let Some(captures)=spawn_pattern.captures(s){
|
||||
return Ok(StageElement{
|
||||
behaviour:StageElementBehaviour::Spawn,
|
||||
stage_id:StageID(captures[1].parse().map_err(IDParseError::ParseInt)?),
|
||||
@@ -180,15 +197,15 @@ struct WormholeElement{
|
||||
impl std::str::FromStr for WormholeElement{
|
||||
type Err=IDParseError;
|
||||
fn from_str(s:&str)->Result<Self,Self::Err>{
|
||||
let bonus_start_pattern=lazy_regex::lazy_regex!(r"^WormholeIn(\d+)$");
|
||||
if let Some(captures)=bonus_start_pattern.captures(s){
|
||||
let wormhole_in_pattern=lazy_regex!(r"^WormholeIn(\d+)$");
|
||||
if let Some(captures)=wormhole_in_pattern.captures(s){
|
||||
return Ok(Self{
|
||||
behaviour:WormholeBehaviour::In,
|
||||
wormhole_id:WormholeID(captures[1].parse().map_err(IDParseError::ParseInt)?),
|
||||
});
|
||||
}
|
||||
let bonus_finish_pattern=lazy_regex::lazy_regex!(r"^WormholeOut(\d+)$");
|
||||
if let Some(captures)=bonus_finish_pattern.captures(s){
|
||||
let wormhole_out_pattern=lazy_regex!(r"^WormholeOut(\d+)$");
|
||||
if let Some(captures)=wormhole_out_pattern.captures(s){
|
||||
return Ok(Self{
|
||||
behaviour:WormholeBehaviour::Out,
|
||||
wormhole_id:WormholeID(captures[1].parse().map_err(IDParseError::ParseInt)?),
|
||||
@@ -206,6 +223,15 @@ impl std::fmt::Display for WormholeElement{
|
||||
}
|
||||
}
|
||||
|
||||
fn count_sequential(modes:&HashMap<ModeID,Vec<&Instance>>)->usize{
|
||||
for mode_id in 0..modes.len(){
|
||||
if !modes.contains_key(&ModeID(mode_id as u64)){
|
||||
return mode_id;
|
||||
}
|
||||
}
|
||||
return modes.len();
|
||||
}
|
||||
|
||||
/// Count various map elements
|
||||
#[derive(Default)]
|
||||
struct Counts<'a>{
|
||||
@@ -225,6 +251,24 @@ pub struct ModelInfo<'a>{
|
||||
counts:Counts<'a>,
|
||||
unanchored_parts:Vec<&'a Instance>,
|
||||
}
|
||||
impl ModelInfo<'_>{
|
||||
pub fn count_modes(&self)->Option<usize>{
|
||||
let start_zones_count=self.counts.mode_start_counts.len();
|
||||
let finish_zones_count=self.counts.mode_finish_counts.len();
|
||||
let sequential_start_zones=count_sequential(&self.counts.mode_start_counts);
|
||||
let sequential_finish_zones=count_sequential(&self.counts.mode_finish_counts);
|
||||
// all counts must match
|
||||
if start_zones_count==finish_zones_count
|
||||
&& sequential_start_zones==sequential_finish_zones
|
||||
&& start_zones_count==sequential_start_zones
|
||||
&& finish_zones_count==sequential_finish_zones
|
||||
{
|
||||
Some(start_zones_count)
|
||||
}else{
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_model_info<'a>(dom:&'a rbx_dom_weak::WeakDom,model_instance:&'a rbx_dom_weak::Instance)->ModelInfo<'a>{
|
||||
// extract model info
|
||||
@@ -237,7 +281,7 @@ pub fn get_model_info<'a>(dom:&'a rbx_dom_weak::WeakDom,model_instance:&'a rbx_d
|
||||
let mut unanchored_parts=Vec::new();
|
||||
let anchored_ustr=rbx_dom_weak::ustr("Anchored");
|
||||
|
||||
let db=rbx_reflection_database::get();
|
||||
let db=rbx_reflection_database::get().unwrap();
|
||||
let base_part=&db.classes["BasePart"];
|
||||
let base_parts=dom.descendants_of(model_instance.referent()).filter(|&instance|
|
||||
db.classes.get(instance.class.as_str()).is_some_and(|class|
|
||||
@@ -398,7 +442,7 @@ pub struct MapInfoOwned{
|
||||
pub creator:String,
|
||||
pub game_id:GameID,
|
||||
}
|
||||
#[allow(dead_code)]
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum IntoMapInfoOwnedError{
|
||||
DisplayName(StringValueError),
|
||||
@@ -446,6 +490,8 @@ struct MapCheck<'a>{
|
||||
mode_finish_counts:SetDifferenceCheck<SetDifferenceCheckContextAtLeastOne<ModeID,Vec<&'a Instance>>>,
|
||||
// Check for dangling MapAnticheat zones (no associated MapStart)
|
||||
mode_anticheat_counts:SetDifferenceCheck<SetDifferenceCheckContextAllowNone<ModeID,Vec<&'a Instance>>>,
|
||||
// Check that modes are sequential
|
||||
modes_sequential:Result<(),Vec<ModeID>>,
|
||||
// Spawn1 must exist
|
||||
spawn1:Result<Exists,Absent>,
|
||||
// Check for dangling Teleport# (no associated Spawn#)
|
||||
@@ -514,6 +560,25 @@ impl<'a> ModelInfo<'a>{
|
||||
let mode_anticheat_counts=SetDifferenceCheckContextAllowNone::new(self.counts.mode_anticheat_counts)
|
||||
.check(&self.counts.mode_start_counts);
|
||||
|
||||
// There must not be non-sequential modes. If Bonus100 exists, Bonuses 1-99 had better also exist.
|
||||
let modes_sequential={
|
||||
let sequential=count_sequential(&self.counts.mode_start_counts);
|
||||
if sequential==self.counts.mode_start_counts.len(){
|
||||
Ok(())
|
||||
}else{
|
||||
let mut non_sequential=Vec::with_capacity(self.counts.mode_start_counts.len()-sequential);
|
||||
for (&mode_id,_) in &self.counts.mode_start_counts{
|
||||
let ModeID(mode_id_u64)=mode_id;
|
||||
if !(mode_id_u64<sequential as u64){
|
||||
non_sequential.push(mode_id);
|
||||
}
|
||||
}
|
||||
// sort so it's prettier when it prints out
|
||||
non_sequential.sort();
|
||||
Err(non_sequential)
|
||||
}
|
||||
};
|
||||
|
||||
// There must be exactly one start zone for every mode in the map.
|
||||
let mode_start_counts=DuplicateCheckContext(self.counts.mode_start_counts).check(|c|1<c.len());
|
||||
|
||||
@@ -550,6 +615,7 @@ impl<'a> ModelInfo<'a>{
|
||||
mode_start_counts,
|
||||
mode_finish_counts,
|
||||
mode_anticheat_counts,
|
||||
modes_sequential,
|
||||
spawn1,
|
||||
teleport_counts,
|
||||
spawn_counts,
|
||||
@@ -573,6 +639,7 @@ impl MapCheck<'_>{
|
||||
mode_start_counts:DuplicateCheck(Ok(())),
|
||||
mode_finish_counts:SetDifferenceCheck(Ok(())),
|
||||
mode_anticheat_counts:SetDifferenceCheck(Ok(())),
|
||||
modes_sequential:Ok(()),
|
||||
spawn1:Ok(Exists),
|
||||
teleport_counts:SetDifferenceCheck(Ok(())),
|
||||
spawn_counts:DuplicateCheck(Ok(())),
|
||||
@@ -746,6 +813,15 @@ impl MapCheck<'_>{
|
||||
}
|
||||
}
|
||||
};
|
||||
let sequential_modes=match &self.modes_sequential{
|
||||
Ok(())=>passed!("SequentialModes"),
|
||||
Err(context)=>{
|
||||
let non_sequential=context.len();
|
||||
let plural_non_sequential=if non_sequential==1{"mode"}else{"modes"};
|
||||
let comma_separated=Separated::new(", ",||context);
|
||||
summary_format!("SequentialModes","{non_sequential} {plural_non_sequential} should use a lower ModeID (no gaps): {comma_separated}")
|
||||
}
|
||||
};
|
||||
let spawn1=match &self.spawn1{
|
||||
Ok(Exists)=>passed!("Spawn1"),
|
||||
Err(Absent)=>summary_format!("Spawn1","Model has no Spawn1"),
|
||||
@@ -824,6 +900,7 @@ impl MapCheck<'_>{
|
||||
extra_finish,
|
||||
missing_finish,
|
||||
dangling_anticheat,
|
||||
sequential_modes,
|
||||
spawn1,
|
||||
dangling_teleport,
|
||||
duplicate_spawns,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use crate::check::CheckListAndVersion;
|
||||
use crate::nats_types::CheckMapfixRequest;
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
Check(crate::check::Error),
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use crate::check::CheckListAndVersion;
|
||||
use crate::nats_types::CheckSubmissionRequest;
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
Check(crate::check::Error),
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use crate::download::download_asset_version;
|
||||
use crate::rbx_util::{get_root_instance,get_mapinfo,read_dom,MapInfo,ReadDomError,GetRootInstanceError,GameID};
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
CreatorTypeMustBeUser,
|
||||
@@ -17,11 +17,11 @@ impl std::fmt::Display for Error{
|
||||
}
|
||||
impl std::error::Error for Error{}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
pub struct CreateRequest{
|
||||
pub ModelID:u64,
|
||||
}
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
pub struct CreateResult{
|
||||
pub AssetOwner:u64,
|
||||
pub DisplayName:Option<String>,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use crate::nats_types::CreateMapfixRequest;
|
||||
use crate::create::CreateRequest;
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
Create(crate::create::Error),
|
||||
|
||||
@@ -2,7 +2,7 @@ use crate::nats_types::CreateSubmissionRequest;
|
||||
use crate::create::CreateRequest;
|
||||
use crate::rbx_util::GameID;
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
Create(crate::create::Error),
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#[allow(dead_code)]
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
ModelLocationDownload(rbx_asset::cloud::GetError),
|
||||
|
||||
@@ -18,6 +18,9 @@ impl Service{
|
||||
endpoint!(set_status_submitted,SubmittedRequest,NullResponse);
|
||||
endpoint!(set_status_request_changes,MapfixId,NullResponse);
|
||||
endpoint!(set_status_validated,MapfixId,NullResponse);
|
||||
endpoint!(set_status_failed,MapfixId,NullResponse);
|
||||
endpoint!(set_status_not_validated,MapfixId,NullResponse);
|
||||
endpoint!(set_status_uploaded,MapfixId,NullResponse);
|
||||
endpoint!(set_status_not_uploaded,MapfixId,NullResponse);
|
||||
endpoint!(set_status_released,MapfixReleaseRequest,NullResponse);
|
||||
endpoint!(set_status_not_released,MapfixId,NullResponse);
|
||||
}
|
||||
|
||||
@@ -11,5 +11,6 @@ impl Service{
|
||||
)->Self{
|
||||
Self{client}
|
||||
}
|
||||
endpoint!(success,OperationSuccessRequest,NullResponse);
|
||||
endpoint!(fail,OperationFailRequest,NullResponse);
|
||||
}
|
||||
|
||||
@@ -18,6 +18,8 @@ impl Service{
|
||||
endpoint!(set_status_submitted,SubmittedRequest,NullResponse);
|
||||
endpoint!(set_status_request_changes,SubmissionId,NullResponse);
|
||||
endpoint!(set_status_validated,SubmissionId,NullResponse);
|
||||
endpoint!(set_status_failed,SubmissionId,NullResponse);
|
||||
endpoint!(set_status_not_validated,SubmissionId,NullResponse);
|
||||
endpoint!(set_status_uploaded,StatusUploadedRequest,NullResponse);
|
||||
endpoint!(set_status_not_uploaded,SubmissionId,NullResponse);
|
||||
endpoint!(set_status_released,SubmissionReleaseRequest,NullResponse);
|
||||
}
|
||||
|
||||
@@ -13,13 +13,15 @@ mod check_submission;
|
||||
mod create;
|
||||
mod create_mapfix;
|
||||
mod create_submission;
|
||||
mod release;
|
||||
mod release_mapfix;
|
||||
mod release_submissions_batch;
|
||||
mod upload_mapfix;
|
||||
mod upload_submission;
|
||||
mod validator;
|
||||
mod validate_mapfix;
|
||||
mod validate_submission;
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum StartupError{
|
||||
API(tonic::transport::Error),
|
||||
@@ -47,24 +49,44 @@ async fn main()->Result<(),StartupError>{
|
||||
},
|
||||
Err(e)=>panic!("{e}: ROBLOX_GROUP_ID env required"),
|
||||
};
|
||||
let load_asset_version_place_id=std::env::var("LOAD_ASSET_VERSION_PLACE_ID").expect("LOAD_ASSET_VERSION_PLACE_ID env required").parse().expect("LOAD_ASSET_VERSION_PLACE_ID int parse failed");
|
||||
let load_asset_version_universe_id=std::env::var("LOAD_ASSET_VERSION_UNIVERSE_ID").expect("LOAD_ASSET_VERSION_UNIVERSE_ID env required").parse().expect("LOAD_ASSET_VERSION_UNIVERSE_ID int parse failed");
|
||||
|
||||
// create / upload models through STRAFESNET_CI2 account
|
||||
let cookie=std::env::var("RBXCOOKIE").expect("RBXCOOKIE env required");
|
||||
let cookie_context=rbx_asset::cookie::Context::new(rbx_asset::cookie::Cookie::new(cookie));
|
||||
// download models through cloud api
|
||||
// download models through cloud api (STRAFESNET_CI2 account)
|
||||
let api_key=std::env::var("RBX_API_KEY").expect("RBX_API_KEY env required");
|
||||
let cloud_context=rbx_asset::cloud::Context::new(rbx_asset::cloud::ApiKey::new(api_key));
|
||||
// luau execution cloud api (StrafesNET group)
|
||||
let api_key=std::env::var("RBX_API_KEY_LUAU_EXECUTION").expect("RBX_API_KEY_LUAU_EXECUTION env required");
|
||||
let cloud_context_luau_execution=rbx_asset::cloud::Context::new(rbx_asset::cloud::ApiKey::new(api_key));
|
||||
|
||||
// maps-service api
|
||||
let api_host_internal=std::env::var("API_HOST_INTERNAL").expect("API_HOST_INTERNAL env required");
|
||||
let endpoint=tonic::transport::Endpoint::new(api_host_internal).map_err(StartupError::API)?;
|
||||
let channel=endpoint.connect_lazy();
|
||||
let mapfixes=crate::grpc::mapfixes::ValidatorMapfixesServiceClient::new(channel.clone());
|
||||
let operations=crate::grpc::operations::ValidatorOperationsServiceClient::new(channel.clone());
|
||||
let scripts=crate::grpc::scripts::ValidatorScriptsServiceClient::new(channel.clone());
|
||||
let script_policy=crate::grpc::script_policy::ValidatorScriptPolicyServiceClient::new(channel.clone());
|
||||
let submissions=crate::grpc::submissions::ValidatorSubmissionsServiceClient::new(channel);
|
||||
let message_handler=message_handler::MessageHandler::new(cloud_context,cookie_context,group_id,mapfixes,operations,scripts,script_policy,submissions);
|
||||
let mapfixes=crate::grpc::mapfixes::Service::new(crate::grpc::mapfixes::ValidatorMapfixesServiceClient::new(channel.clone()));
|
||||
let operations=crate::grpc::operations::Service::new(crate::grpc::operations::ValidatorOperationsServiceClient::new(channel.clone()));
|
||||
let scripts=crate::grpc::scripts::Service::new(crate::grpc::scripts::ValidatorScriptsServiceClient::new(channel.clone()));
|
||||
let script_policy=crate::grpc::script_policy::Service::new(crate::grpc::script_policy::ValidatorScriptPolicyServiceClient::new(channel.clone()));
|
||||
let submissions=crate::grpc::submissions::Service::new(crate::grpc::submissions::ValidatorSubmissionsServiceClient::new(channel.clone()));
|
||||
let load_asset_version_runtime=rbx_asset::cloud::LuauSessionLatestRequest{
|
||||
place_id:load_asset_version_place_id,
|
||||
universe_id:load_asset_version_universe_id,
|
||||
};
|
||||
let message_handler=message_handler::MessageHandler{
|
||||
cloud_context,
|
||||
cookie_context,
|
||||
cloud_context_luau_execution,
|
||||
group_id,
|
||||
load_asset_version_runtime,
|
||||
mapfixes,
|
||||
operations,
|
||||
scripts,
|
||||
script_policy,
|
||||
submissions,
|
||||
};
|
||||
|
||||
// nats
|
||||
let nats_host=std::env::var("NATS_HOST").expect("NATS_HOST env required");
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#[allow(dead_code)]
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum HandleMessageError{
|
||||
Messages(async_nats::jetstream::consumer::pull::MessagesError),
|
||||
@@ -9,6 +9,8 @@ pub enum HandleMessageError{
|
||||
CreateSubmission(tonic::Status),
|
||||
CheckMapfix(crate::check_mapfix::Error),
|
||||
CheckSubmission(crate::check_submission::Error),
|
||||
ReleaseMapfix(crate::release_mapfix::Error),
|
||||
ReleaseSubmissionsBatch(crate::release_submissions_batch::Error),
|
||||
UploadMapfix(crate::upload_mapfix::Error),
|
||||
UploadSubmission(crate::upload_submission::Error),
|
||||
ValidateMapfix(crate::validate_mapfix::Error),
|
||||
@@ -30,7 +32,9 @@ fn from_slice<'a,T:serde::de::Deserialize<'a>>(slice:&'a [u8])->Result<T,HandleM
|
||||
pub struct MessageHandler{
|
||||
pub(crate) cloud_context:rbx_asset::cloud::Context,
|
||||
pub(crate) cookie_context:rbx_asset::cookie::Context,
|
||||
pub(crate) cloud_context_luau_execution:rbx_asset::cloud::Context,
|
||||
pub(crate) group_id:Option<u64>,
|
||||
pub(crate) load_asset_version_runtime:rbx_asset::cloud::LuauSessionLatestRequest,
|
||||
pub(crate) mapfixes:crate::grpc::mapfixes::Service,
|
||||
pub(crate) operations:crate::grpc::operations::Service,
|
||||
pub(crate) scripts:crate::grpc::scripts::Service,
|
||||
@@ -39,27 +43,6 @@ pub struct MessageHandler{
|
||||
}
|
||||
|
||||
impl MessageHandler{
|
||||
pub fn new(
|
||||
cloud_context:rbx_asset::cloud::Context,
|
||||
cookie_context:rbx_asset::cookie::Context,
|
||||
group_id:Option<u64>,
|
||||
mapfixes:crate::grpc::mapfixes::ValidatorMapfixesServiceClient,
|
||||
operations:crate::grpc::operations::ValidatorOperationsServiceClient,
|
||||
scripts:crate::grpc::scripts::ValidatorScriptsServiceClient,
|
||||
script_policy:crate::grpc::script_policy::ValidatorScriptPolicyServiceClient,
|
||||
submissions:crate::grpc::submissions::ValidatorSubmissionsServiceClient,
|
||||
)->Self{
|
||||
Self{
|
||||
cloud_context,
|
||||
cookie_context,
|
||||
group_id,
|
||||
mapfixes:crate::grpc::mapfixes::Service::new(mapfixes),
|
||||
operations:crate::grpc::operations::Service::new(operations),
|
||||
scripts:crate::grpc::scripts::Service::new(scripts),
|
||||
script_policy:crate::grpc::script_policy::Service::new(script_policy),
|
||||
submissions:crate::grpc::submissions::Service::new(submissions),
|
||||
}
|
||||
}
|
||||
pub async fn handle_message_result(&self,message_result:MessageResult)->Result<(),HandleMessageError>{
|
||||
let message=message_result.map_err(HandleMessageError::Messages)?;
|
||||
message.double_ack().await.map_err(HandleMessageError::DoubleAck)?;
|
||||
@@ -68,6 +51,8 @@ impl MessageHandler{
|
||||
"maptest.submissions.create"=>self.create_submission(from_slice(&message.payload)?).await.map_err(HandleMessageError::CreateSubmission),
|
||||
"maptest.mapfixes.check"=>self.check_mapfix(from_slice(&message.payload)?).await.map_err(HandleMessageError::CheckMapfix),
|
||||
"maptest.submissions.check"=>self.check_submission(from_slice(&message.payload)?).await.map_err(HandleMessageError::CheckSubmission),
|
||||
"maptest.mapfixes.release"=>self.release_mapfix(from_slice(&message.payload)?).await.map_err(HandleMessageError::ReleaseMapfix),
|
||||
"maptest.submissions.batchrelease"=>self.release_submissions_batch(from_slice(&message.payload)?).await.map_err(HandleMessageError::ReleaseSubmissionsBatch),
|
||||
"maptest.mapfixes.upload"=>self.upload_mapfix(from_slice(&message.payload)?).await.map_err(HandleMessageError::UploadMapfix),
|
||||
"maptest.submissions.upload"=>self.upload_submission(from_slice(&message.payload)?).await.map_err(HandleMessageError::UploadSubmission),
|
||||
"maptest.mapfixes.validate"=>self.validate_mapfix(from_slice(&message.payload)?).await.map_err(HandleMessageError::ValidateMapfix),
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
// Requests are sent from maps-service to validator
|
||||
// Validation invokes the REST api to update the submissions
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(serde::Deserialize)]
|
||||
pub struct CreateSubmissionRequest{
|
||||
// operation_id is passed back in the response message
|
||||
@@ -18,7 +18,7 @@ pub struct CreateSubmissionRequest{
|
||||
pub Roles:u32,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(serde::Deserialize)]
|
||||
pub struct CreateMapfixRequest{
|
||||
pub OperationID:u32,
|
||||
@@ -27,7 +27,7 @@ pub struct CreateMapfixRequest{
|
||||
pub Description:String,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(serde::Deserialize)]
|
||||
pub struct CheckSubmissionRequest{
|
||||
pub SubmissionID:u64,
|
||||
@@ -35,7 +35,7 @@ pub struct CheckSubmissionRequest{
|
||||
pub SkipChecks:bool,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(serde::Deserialize)]
|
||||
pub struct CheckMapfixRequest{
|
||||
pub MapfixID:u64,
|
||||
@@ -43,7 +43,7 @@ pub struct CheckMapfixRequest{
|
||||
pub SkipChecks:bool,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(serde::Deserialize)]
|
||||
pub struct ValidateSubmissionRequest{
|
||||
// submission_id is passed back in the response message
|
||||
@@ -53,7 +53,7 @@ pub struct ValidateSubmissionRequest{
|
||||
pub ValidatedModelID:Option<u64>,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(serde::Deserialize)]
|
||||
pub struct ValidateMapfixRequest{
|
||||
// submission_id is passed back in the response message
|
||||
@@ -64,7 +64,7 @@ pub struct ValidateMapfixRequest{
|
||||
}
|
||||
|
||||
// Create a new map
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(serde::Deserialize)]
|
||||
pub struct UploadSubmissionRequest{
|
||||
pub SubmissionID:u64,
|
||||
@@ -73,7 +73,7 @@ pub struct UploadSubmissionRequest{
|
||||
pub ModelName:String,
|
||||
}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(serde::Deserialize)]
|
||||
pub struct UploadMapfixRequest{
|
||||
pub MapfixID:u64,
|
||||
@@ -81,3 +81,34 @@ pub struct UploadMapfixRequest{
|
||||
pub ModelVersion:u64,
|
||||
pub TargetAssetID:u64,
|
||||
}
|
||||
|
||||
// Release a new map
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(serde::Deserialize)]
|
||||
pub struct ReleaseSubmissionRequest{
|
||||
pub SubmissionID:u64,
|
||||
pub ReleaseDate:i64,
|
||||
pub ModelID:u64,
|
||||
pub ModelVersion:u64,
|
||||
pub UploadedAssetID:u64,
|
||||
pub DisplayName:String,
|
||||
pub Creator:String,
|
||||
pub GameID:u32,
|
||||
pub Submitter:u64,
|
||||
}
|
||||
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(serde::Deserialize)]
|
||||
pub struct ReleaseSubmissionsBatchRequest{
|
||||
pub Submissions:Vec<ReleaseSubmissionRequest>,
|
||||
pub OperationID:u32,
|
||||
}
|
||||
|
||||
#[expect(nonstandard_style)]
|
||||
#[derive(serde::Deserialize)]
|
||||
pub struct ReleaseMapfixRequest{
|
||||
pub MapfixID:u64,
|
||||
pub ModelID:u64,
|
||||
pub ModelVersion:u64,
|
||||
pub TargetAssetID:u64,
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum ReadDomError{
|
||||
Binary(rbx_binary::DecodeError),
|
||||
@@ -112,3 +111,21 @@ pub fn get_mapinfo<'a>(dom:&'a rbx_dom_weak::WeakDom,model_instance:&rbx_dom_wea
|
||||
game_id:model_instance.name.parse(),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_luau_result_exp_backoff(
|
||||
context:&rbx_asset::cloud::Context,
|
||||
luau_session:&rbx_asset::cloud::LuauSessionResponse
|
||||
)->Result<Result<rbx_asset::cloud::LuauResults,rbx_asset::cloud::LuauError>,rbx_asset::cloud::LuauSessionError>{
|
||||
const BACKOFF_MUL:f32=1.395_612_5;//exp(1/3)
|
||||
let mut backoff=1000f32;
|
||||
loop{
|
||||
match luau_session.try_get_result(context).await{
|
||||
//try again when the operation is not done
|
||||
Err(rbx_asset::cloud::LuauSessionError::NotDone)=>(),
|
||||
//return all other results
|
||||
other_result=>return other_result,
|
||||
}
|
||||
tokio::time::sleep(std::time::Duration::from_millis(backoff as u64)).await;
|
||||
backoff*=BACKOFF_MUL;
|
||||
}
|
||||
}
|
||||
|
||||
104
validation/src/release.rs
Normal file
104
validation/src/release.rs
Normal file
@@ -0,0 +1,104 @@
|
||||
use crate::rbx_util::read_dom;
|
||||
|
||||
#[expect(unused)]
|
||||
#[derive(Debug)]
|
||||
pub enum ModesError{
|
||||
ApiActionMapfixReleased(tonic::Status),
|
||||
ModelFileDecode(crate::rbx_util::ReadDomError),
|
||||
GetRootInstance(crate::rbx_util::GetRootInstanceError),
|
||||
NonSequentialModes,
|
||||
TooManyModes(usize),
|
||||
}
|
||||
impl std::fmt::Display for ModesError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for ModesError{}
|
||||
|
||||
// decode and get modes function
|
||||
pub fn count_modes(maybe_gzip:rbx_asset::types::MaybeGzippedBytes)->Result<u32,ModesError>{
|
||||
// decode dom (slow!)
|
||||
let dom=maybe_gzip.read_with(read_dom,read_dom).map_err(ModesError::ModelFileDecode)?;
|
||||
|
||||
// extract the root instance
|
||||
let model_instance=crate::rbx_util::get_root_instance(&dom).map_err(ModesError::GetRootInstance)?;
|
||||
|
||||
// extract information from the model
|
||||
let model_info=crate::check::get_model_info(&dom,model_instance);
|
||||
|
||||
// count modes
|
||||
let modes=model_info.count_modes().ok_or(ModesError::NonSequentialModes)?;
|
||||
|
||||
// hard limit LOL
|
||||
let modes=if modes<u32::MAX as usize{
|
||||
modes as u32
|
||||
}else{
|
||||
return Err(ModesError::TooManyModes(modes));
|
||||
};
|
||||
|
||||
Ok(modes)
|
||||
}
|
||||
|
||||
#[expect(unused)]
|
||||
#[derive(Debug)]
|
||||
pub enum LoadAssetVersionsError{
|
||||
CreateSession(rbx_asset::cloud::CreateError),
|
||||
NonPositiveNumber(serde_json::Value),
|
||||
Script(rbx_asset::cloud::LuauError),
|
||||
InvalidResult(Vec<serde_json::Value>),
|
||||
LuauSession(rbx_asset::cloud::LuauSessionError),
|
||||
}
|
||||
impl std::fmt::Display for LoadAssetVersionsError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for LoadAssetVersionsError{}
|
||||
|
||||
// get asset versions in bulk using Roblox Luau API
|
||||
pub async fn load_asset_versions<I:IntoIterator<Item=u64>>(
|
||||
context:&rbx_asset::cloud::Context,
|
||||
runtime:&rbx_asset::cloud::LuauSessionLatestRequest,
|
||||
assets:I,
|
||||
)->Result<Vec<u64>,LoadAssetVersionsError>{
|
||||
// construct script with inline IDs
|
||||
// TODO: concurrent execution
|
||||
let mut script="local InsertService=game:GetService(\"InsertService\")\nreturn\n".to_string();
|
||||
for asset in assets{
|
||||
use std::fmt::Write;
|
||||
write!(script,"InsertService:GetLatestAssetVersionAsync({asset}),\n").unwrap();
|
||||
}
|
||||
|
||||
let session=rbx_asset::cloud::LuauSessionCreate{
|
||||
script:&script[..script.len()-2],
|
||||
user:None,
|
||||
timeout:None,
|
||||
binaryInput:None,
|
||||
enableBinaryOutput:None,
|
||||
binaryOutputUri:None,
|
||||
};
|
||||
let session_response=context.create_luau_session(runtime,session).await.map_err(LoadAssetVersionsError::CreateSession)?;
|
||||
|
||||
let result=crate::rbx_util::get_luau_result_exp_backoff(&context,&session_response).await;
|
||||
|
||||
// * Note that only one mapfix can be active per map
|
||||
// * so it's theoretically impossible for the map to be updated unexpectedly.
|
||||
// * This means that the incremental asset version does not
|
||||
// * need to be checked before and after the load asset version is checked.
|
||||
|
||||
match result{
|
||||
Ok(Ok(rbx_asset::cloud::LuauResults{results}))=>{
|
||||
results.into_iter().map(|load_asset_version|
|
||||
match load_asset_version.as_u64(){
|
||||
Some(version)=>Ok(version),
|
||||
None=>Err(LoadAssetVersionsError::NonPositiveNumber(load_asset_version))
|
||||
}
|
||||
).collect()
|
||||
},
|
||||
Ok(Err(e))=>Err(LoadAssetVersionsError::Script(e)),
|
||||
Err(e)=>Err(LoadAssetVersionsError::LuauSession(e)),
|
||||
}
|
||||
|
||||
// * Don't need to check asset version to make sure it hasn't been updated
|
||||
}
|
||||
101
validation/src/release_mapfix.rs
Normal file
101
validation/src/release_mapfix.rs
Normal file
@@ -0,0 +1,101 @@
|
||||
use crate::download::download_asset_version;
|
||||
use crate::nats_types::ReleaseMapfixRequest;
|
||||
use crate::release::{count_modes,load_asset_versions};
|
||||
|
||||
#[expect(unused)]
|
||||
#[derive(Debug)]
|
||||
pub enum InnerError{
|
||||
Download(crate::download::Error),
|
||||
Modes(crate::release::ModesError),
|
||||
LoadAssetVersions(crate::release::LoadAssetVersionsError),
|
||||
LoadAssetVersionsListLength,
|
||||
}
|
||||
impl std::fmt::Display for InnerError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for InnerError{}
|
||||
|
||||
async fn release_inner(
|
||||
cloud_context:&rbx_asset::cloud::Context,
|
||||
cloud_context_luau_execution:&rbx_asset::cloud::Context,
|
||||
load_asset_version_runtime:&rbx_asset::cloud::LuauSessionLatestRequest,
|
||||
release_info:ReleaseMapfixRequest,
|
||||
)->Result<rust_grpc::validator::MapfixReleaseRequest,InnerError>{
|
||||
// download the map model
|
||||
let maybe_gzip=download_asset_version(cloud_context,rbx_asset::cloud::GetAssetVersionRequest{
|
||||
asset_id:release_info.ModelID,
|
||||
version:release_info.ModelVersion,
|
||||
}).await.map_err(InnerError::Download)?;
|
||||
|
||||
// count modes
|
||||
let modes=count_modes(maybe_gzip).map_err(InnerError::Modes)?;
|
||||
|
||||
// fetch load asset version
|
||||
let load_asset_versions=load_asset_versions(
|
||||
cloud_context_luau_execution,
|
||||
load_asset_version_runtime,
|
||||
[release_info.TargetAssetID],
|
||||
).await.map_err(InnerError::LoadAssetVersions)?;
|
||||
|
||||
// exactly one value in the results
|
||||
let &[load_asset_version]=load_asset_versions.as_slice()else{
|
||||
return Err(InnerError::LoadAssetVersionsListLength);
|
||||
};
|
||||
|
||||
Ok(rust_grpc::validator::MapfixReleaseRequest{
|
||||
mapfix_id:release_info.MapfixID,
|
||||
target_asset_id:release_info.TargetAssetID,
|
||||
asset_version:load_asset_version,
|
||||
modes:modes,
|
||||
})
|
||||
}
|
||||
|
||||
#[expect(unused)]
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
ApiActionMapfixRelease(tonic::Status),
|
||||
}
|
||||
impl std::fmt::Display for Error{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for Error{}
|
||||
|
||||
impl crate::message_handler::MessageHandler{
|
||||
pub async fn release_mapfix(&self,release_info:ReleaseMapfixRequest)->Result<(),Error>{
|
||||
let mapfix_id=release_info.MapfixID;
|
||||
let result=release_inner(
|
||||
&self.cloud_context,
|
||||
&self.cloud_context_luau_execution,
|
||||
&self.load_asset_version_runtime,
|
||||
release_info,
|
||||
).await;
|
||||
|
||||
match result{
|
||||
Ok(request)=>{
|
||||
// update map metadata
|
||||
self.mapfixes.set_status_released(request).await.map_err(Error::ApiActionMapfixRelease)?;
|
||||
},
|
||||
Err(e)=>{
|
||||
// log error
|
||||
println!("[release_mapfix] Error: {e}");
|
||||
|
||||
// post an error message to the audit log
|
||||
self.mapfixes.create_audit_error(rust_grpc::validator::AuditErrorRequest{
|
||||
id:mapfix_id,
|
||||
error_message:e.to_string(),
|
||||
}).await.map_err(Error::ApiActionMapfixRelease)?;
|
||||
|
||||
// update the mapfix model status to uploaded
|
||||
self.mapfixes.set_status_not_released(rust_grpc::validator::MapfixId{
|
||||
id:mapfix_id,
|
||||
}).await.map_err(Error::ApiActionMapfixRelease)?;
|
||||
},
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
227
validation/src/release_submissions_batch.rs
Normal file
227
validation/src/release_submissions_batch.rs
Normal file
@@ -0,0 +1,227 @@
|
||||
use futures::StreamExt;
|
||||
|
||||
use crate::download::download_asset_version;
|
||||
use crate::nats_types::ReleaseSubmissionsBatchRequest;
|
||||
use crate::release::{count_modes,load_asset_versions};
|
||||
|
||||
|
||||
#[expect(unused)]
|
||||
#[derive(Debug)]
|
||||
pub enum DownloadFutError{
|
||||
Download(crate::download::Error),
|
||||
Join(tokio::task::JoinError),
|
||||
Modes(crate::release::ModesError),
|
||||
}
|
||||
impl std::fmt::Display for DownloadFutError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for DownloadFutError{}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ErrorContext<E>{
|
||||
submission_id:u64,
|
||||
error:E,
|
||||
}
|
||||
impl<E:std::fmt::Debug> std::fmt::Display for ErrorContext<E>{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"SubmissionID({})={:?}",self.submission_id,self.error)
|
||||
}
|
||||
}
|
||||
impl<E:std::fmt::Debug> std::error::Error for ErrorContext<E>{}
|
||||
|
||||
async fn download_fut(
|
||||
cloud_context:&rbx_asset::cloud::Context,
|
||||
asset_version:rbx_asset::cloud::GetAssetVersionRequest,
|
||||
)->Result<u32,DownloadFutError>{
|
||||
// download
|
||||
let maybe_gzip=download_asset_version(cloud_context,asset_version)
|
||||
.await
|
||||
.map_err(DownloadFutError::Download)?;
|
||||
|
||||
// count modes in a green thread
|
||||
let modes=tokio::task::spawn_blocking(||
|
||||
count_modes(maybe_gzip)
|
||||
)
|
||||
.await
|
||||
.map_err(DownloadFutError::Join)?
|
||||
.map_err(DownloadFutError::Modes)?;
|
||||
|
||||
Ok::<_,DownloadFutError>(modes)
|
||||
}
|
||||
|
||||
#[expect(unused)]
|
||||
#[derive(Debug)]
|
||||
pub enum InnerError{
|
||||
Io(std::io::Error),
|
||||
LoadAssetVersions(crate::release::LoadAssetVersionsError),
|
||||
LoadAssetVersionsListLength,
|
||||
DownloadFutErrors(Vec<ErrorContext<DownloadFutError>>),
|
||||
ReleaseErrors(Vec<ErrorContext<tonic::Status>>),
|
||||
}
|
||||
impl std::fmt::Display for InnerError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for InnerError{}
|
||||
|
||||
const MAX_PARALLEL_DECODE:usize=6;
|
||||
const MAX_CONCURRENT_RELEASE:usize=16;
|
||||
|
||||
async fn release_inner(
|
||||
release_info:ReleaseSubmissionsBatchRequest,
|
||||
cloud_context:&rbx_asset::cloud::Context,
|
||||
cloud_context_luau_execution:&rbx_asset::cloud::Context,
|
||||
load_asset_version_runtime:&rbx_asset::cloud::LuauSessionLatestRequest,
|
||||
submissions:&crate::grpc::submissions::Service,
|
||||
)->Result<(),InnerError>{
|
||||
let available_parallelism=std::thread::available_parallelism().map_err(InnerError::Io)?.get();
|
||||
// set up futures
|
||||
|
||||
// unnecessary allocation :(
|
||||
let asset_versions:Vec<_> =release_info
|
||||
.Submissions
|
||||
.iter()
|
||||
.map(|submission|rbx_asset::cloud::GetAssetVersionRequest{
|
||||
asset_id:submission.ModelID,
|
||||
version:submission.ModelVersion,
|
||||
})
|
||||
.enumerate()
|
||||
.collect();
|
||||
|
||||
// fut_download
|
||||
let fut_download=futures::stream::iter(asset_versions)
|
||||
.map(|(index,asset_version)|async move{
|
||||
let modes=download_fut(cloud_context,asset_version).await;
|
||||
(index,modes)
|
||||
})
|
||||
.buffer_unordered(available_parallelism.min(MAX_PARALLEL_DECODE))
|
||||
.collect::<Vec<(usize,Result<_,DownloadFutError>)>>();
|
||||
|
||||
// fut_luau
|
||||
let fut_load_asset_versions=load_asset_versions(
|
||||
cloud_context_luau_execution,
|
||||
load_asset_version_runtime,
|
||||
release_info.Submissions.iter().map(|submission|submission.UploadedAssetID),
|
||||
);
|
||||
|
||||
// execute futures
|
||||
let (mut modes_unordered,load_asset_versions_result)=tokio::join!(fut_download,fut_load_asset_versions);
|
||||
|
||||
let load_asset_versions=load_asset_versions_result.map_err(InnerError::LoadAssetVersions)?;
|
||||
|
||||
// sanity check roblox output
|
||||
if load_asset_versions.len()!=release_info.Submissions.len(){
|
||||
return Err(InnerError::LoadAssetVersionsListLength);
|
||||
};
|
||||
|
||||
// rip asymptotic complexity (hash map would be better)
|
||||
modes_unordered.sort_by_key(|&(index,_)|index);
|
||||
|
||||
// check modes calculations for all success
|
||||
let mut modes=Vec::with_capacity(modes_unordered.len());
|
||||
let mut errors=Vec::with_capacity(modes_unordered.len());
|
||||
for (index,result) in modes_unordered{
|
||||
match result{
|
||||
Ok(value)=>modes.push(value),
|
||||
Err(error)=>errors.push(ErrorContext{
|
||||
submission_id:release_info.Submissions[index].SubmissionID,
|
||||
error:error,
|
||||
}),
|
||||
}
|
||||
}
|
||||
if !errors.is_empty(){
|
||||
return Err(InnerError::DownloadFutErrors(errors));
|
||||
}
|
||||
|
||||
// concurrently dispatch results
|
||||
let release_results:Vec<_> =futures::stream::iter(
|
||||
release_info
|
||||
.Submissions
|
||||
.into_iter()
|
||||
.zip(modes)
|
||||
.zip(load_asset_versions)
|
||||
.map(|((submission,modes),asset_version)|async move{
|
||||
let result=submissions.set_status_released(rust_grpc::validator::SubmissionReleaseRequest{
|
||||
submission_id:submission.SubmissionID,
|
||||
map_create:Some(rust_grpc::maps_extended::MapCreate{
|
||||
id:submission.UploadedAssetID as i64,
|
||||
display_name:submission.DisplayName,
|
||||
creator:submission.Creator,
|
||||
game_id:submission.GameID,
|
||||
date:submission.ReleaseDate,
|
||||
submitter:submission.Submitter,
|
||||
thumbnail:0,
|
||||
asset_version,
|
||||
modes,
|
||||
}),
|
||||
}).await;
|
||||
(submission.SubmissionID,result)
|
||||
})
|
||||
)
|
||||
.buffer_unordered(MAX_CONCURRENT_RELEASE)
|
||||
.collect().await;
|
||||
|
||||
// check for errors
|
||||
let errors:Vec<_> =
|
||||
release_results
|
||||
.into_iter()
|
||||
.filter_map(|(submission_id,result)|
|
||||
result.err().map(|e|ErrorContext{
|
||||
submission_id,
|
||||
error:e,
|
||||
})
|
||||
)
|
||||
.collect();
|
||||
|
||||
if !errors.is_empty(){
|
||||
return Err(InnerError::ReleaseErrors(errors));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
UpdateOperation(tonic::Status),
|
||||
}
|
||||
impl std::fmt::Display for Error{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for Error{}
|
||||
|
||||
impl crate::message_handler::MessageHandler{
|
||||
pub async fn release_submissions_batch(&self,release_info:ReleaseSubmissionsBatchRequest)->Result<(),Error>{
|
||||
let operation_id=release_info.OperationID;
|
||||
let result=release_inner(
|
||||
release_info,
|
||||
&self.cloud_context,
|
||||
&self.cloud_context_luau_execution,
|
||||
&self.load_asset_version_runtime,
|
||||
&self.submissions,
|
||||
).await;
|
||||
|
||||
match result{
|
||||
Ok(())=>{
|
||||
// operation success
|
||||
self.operations.success(rust_grpc::validator::OperationSuccessRequest{
|
||||
operation_id,
|
||||
path:String::new(),
|
||||
}).await.map_err(Error::UpdateOperation)?;
|
||||
},
|
||||
Err(e)=>{
|
||||
// operation error
|
||||
self.operations.fail(rust_grpc::validator::OperationFailRequest{
|
||||
operation_id,
|
||||
status_message:e.to_string(),
|
||||
}).await.map_err(Error::UpdateOperation)?;
|
||||
},
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -5,8 +5,6 @@ pub struct MapfixID(pub(crate)u64);
|
||||
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq,serde::Serialize,serde::Deserialize)]
|
||||
pub struct SubmissionID(pub(crate)u64);
|
||||
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq,serde::Serialize,serde::Deserialize)]
|
||||
pub struct OperationID(pub(crate)u64);
|
||||
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq,serde::Serialize,serde::Deserialize)]
|
||||
pub struct ResourceID(pub(crate)u64);
|
||||
#[derive(Clone,Copy,Debug,Hash,Eq,PartialEq,serde::Serialize,serde::Deserialize)]
|
||||
pub struct ScriptID(pub(crate)u64);
|
||||
|
||||
@@ -1,13 +1,51 @@
|
||||
use crate::download::download_asset_version;
|
||||
use crate::nats_types::UploadMapfixRequest;
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
pub enum InnerError{
|
||||
Download(crate::download::Error),
|
||||
IO(std::io::Error),
|
||||
Json(serde_json::Error),
|
||||
Upload(rbx_asset::cookie::UploadError),
|
||||
}
|
||||
impl std::fmt::Display for InnerError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for InnerError{}
|
||||
|
||||
async fn upload_inner(
|
||||
upload_info:UploadMapfixRequest,
|
||||
cloud_context:&rbx_asset::cloud::Context,
|
||||
cookie_context:&rbx_asset::cookie::Context,
|
||||
group_id:Option<u64>,
|
||||
)->Result<(),InnerError>{
|
||||
// download the map model
|
||||
let maybe_gzip=download_asset_version(cloud_context,rbx_asset::cloud::GetAssetVersionRequest{
|
||||
asset_id:upload_info.ModelID,
|
||||
version:upload_info.ModelVersion,
|
||||
}).await.map_err(InnerError::Download)?;
|
||||
|
||||
// transparently handle gzipped models
|
||||
let model_data=maybe_gzip.to_vec().map_err(InnerError::IO)?;
|
||||
|
||||
// upload the map to the strafesnet group
|
||||
let _upload_response=cookie_context.upload(rbx_asset::cookie::UploadRequest{
|
||||
assetid:upload_info.TargetAssetID,
|
||||
groupId:group_id,
|
||||
name:None,
|
||||
description:None,
|
||||
ispublic:None,
|
||||
allowComments:None,
|
||||
},model_data).await.map_err(InnerError::Upload)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
ApiActionMapfixUploaded(tonic::Status),
|
||||
}
|
||||
impl std::fmt::Display for Error{
|
||||
@@ -19,31 +57,39 @@ impl std::error::Error for Error{}
|
||||
|
||||
impl crate::message_handler::MessageHandler{
|
||||
pub async fn upload_mapfix(&self,upload_info:UploadMapfixRequest)->Result<(),Error>{
|
||||
// download the map model
|
||||
let maybe_gzip=download_asset_version(&self.cloud_context,rbx_asset::cloud::GetAssetVersionRequest{
|
||||
asset_id:upload_info.ModelID,
|
||||
version:upload_info.ModelVersion,
|
||||
}).await.map_err(Error::Download)?;
|
||||
let mapfix_id=upload_info.MapfixID;
|
||||
let result=upload_inner(
|
||||
upload_info,
|
||||
&self.cloud_context,
|
||||
&self.cookie_context,
|
||||
self.group_id,
|
||||
).await;
|
||||
|
||||
// transparently handle gzipped models
|
||||
let model_data=maybe_gzip.to_vec().map_err(Error::IO)?;
|
||||
// update the mapfix depending on the result
|
||||
match result{
|
||||
Ok(())=>{
|
||||
// mark mapfix as uploaded, TargetAssetID is unchanged
|
||||
self.mapfixes.set_status_uploaded(rust_grpc::validator::MapfixId{
|
||||
id:mapfix_id,
|
||||
}).await.map_err(Error::ApiActionMapfixUploaded)?;
|
||||
},
|
||||
Err(e)=>{
|
||||
// log error
|
||||
println!("[upload_mapfix] Error: {e}");
|
||||
|
||||
// upload the map to the strafesnet group
|
||||
let _upload_response=self.cookie_context.upload(rbx_asset::cookie::UploadRequest{
|
||||
assetid:upload_info.TargetAssetID,
|
||||
groupId:self.group_id,
|
||||
name:None,
|
||||
description:None,
|
||||
ispublic:None,
|
||||
allowComments:None,
|
||||
},model_data).await.map_err(Error::Upload)?;
|
||||
self.mapfixes.create_audit_error(
|
||||
rust_grpc::validator::AuditErrorRequest{
|
||||
id:mapfix_id,
|
||||
error_message:e.to_string(),
|
||||
}
|
||||
).await.map_err(Error::ApiActionMapfixUploaded)?;
|
||||
|
||||
// that's it, the database entry does not need to be changed.
|
||||
|
||||
// mark mapfix as uploaded, TargetAssetID is unchanged
|
||||
self.mapfixes.set_status_uploaded(rust_grpc::validator::MapfixId{
|
||||
id:upload_info.MapfixID,
|
||||
}).await.map_err(Error::ApiActionMapfixUploaded)?;
|
||||
// update the mapfix model status to accepted
|
||||
self.mapfixes.set_status_not_uploaded(rust_grpc::validator::MapfixId{
|
||||
id:mapfix_id,
|
||||
}).await.map_err(Error::ApiActionMapfixUploaded)?;
|
||||
},
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,14 +1,52 @@
|
||||
use crate::download::download_asset_version;
|
||||
use crate::nats_types::UploadSubmissionRequest;
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
pub enum InnerError{
|
||||
Download(crate::download::Error),
|
||||
IO(std::io::Error),
|
||||
Json(serde_json::Error),
|
||||
Create(rbx_asset::cookie::CreateError),
|
||||
SystemTime(std::time::SystemTimeError),
|
||||
}
|
||||
impl std::fmt::Display for InnerError{
|
||||
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
|
||||
write!(f,"{self:?}")
|
||||
}
|
||||
}
|
||||
impl std::error::Error for InnerError{}
|
||||
|
||||
async fn upload_inner(
|
||||
upload_info:UploadSubmissionRequest,
|
||||
cloud_context:&rbx_asset::cloud::Context,
|
||||
cookie_context:&rbx_asset::cookie::Context,
|
||||
group_id:Option<u64>,
|
||||
)->Result<u64,InnerError>{
|
||||
// download the map model
|
||||
let maybe_gzip=download_asset_version(cloud_context,rbx_asset::cloud::GetAssetVersionRequest{
|
||||
asset_id:upload_info.ModelID,
|
||||
version:upload_info.ModelVersion,
|
||||
}).await.map_err(InnerError::Download)?;
|
||||
|
||||
// transparently handle gzipped models
|
||||
let model_data=maybe_gzip.to_vec().map_err(InnerError::IO)?;
|
||||
|
||||
// upload the map to the strafesnet group
|
||||
let upload_response=cookie_context.create(rbx_asset::cookie::CreateRequest{
|
||||
name:upload_info.ModelName.clone(),
|
||||
description:"".to_owned(),
|
||||
ispublic:false,
|
||||
allowComments:false,
|
||||
groupId:group_id,
|
||||
},model_data).await.map_err(InnerError::Create)?;
|
||||
|
||||
Ok(upload_response.AssetId)
|
||||
}
|
||||
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
ApiActionSubmissionUploaded(tonic::Status),
|
||||
}
|
||||
impl std::fmt::Display for Error{
|
||||
@@ -20,29 +58,40 @@ impl std::error::Error for Error{}
|
||||
|
||||
impl crate::message_handler::MessageHandler{
|
||||
pub async fn upload_submission(&self,upload_info:UploadSubmissionRequest)->Result<(),Error>{
|
||||
// download the map model
|
||||
let maybe_gzip=download_asset_version(&self.cloud_context,rbx_asset::cloud::GetAssetVersionRequest{
|
||||
asset_id:upload_info.ModelID,
|
||||
version:upload_info.ModelVersion,
|
||||
}).await.map_err(Error::Download)?;
|
||||
let submission_id=upload_info.SubmissionID;
|
||||
let result=upload_inner(
|
||||
upload_info,
|
||||
&self.cloud_context,
|
||||
&self.cookie_context,
|
||||
self.group_id,
|
||||
).await;
|
||||
|
||||
// transparently handle gzipped models
|
||||
let model_data=maybe_gzip.to_vec().map_err(Error::IO)?;
|
||||
// update the submission depending on the result
|
||||
match result{
|
||||
Ok(uploaded_asset_id)=>{
|
||||
// note the asset id of the created model for later release, and mark the submission as uploaded
|
||||
self.submissions.set_status_uploaded(rust_grpc::validator::StatusUploadedRequest{
|
||||
id:submission_id,
|
||||
uploaded_asset_id,
|
||||
}).await.map_err(Error::ApiActionSubmissionUploaded)?;
|
||||
},
|
||||
Err(e)=>{
|
||||
// log error
|
||||
println!("[upload_submission] Error: {e}");
|
||||
|
||||
// upload the map to the strafesnet group
|
||||
let upload_response=self.cookie_context.create(rbx_asset::cookie::CreateRequest{
|
||||
name:upload_info.ModelName.clone(),
|
||||
description:"".to_owned(),
|
||||
ispublic:false,
|
||||
allowComments:false,
|
||||
groupId:self.group_id,
|
||||
},model_data).await.map_err(Error::Create)?;
|
||||
self.submissions.create_audit_error(
|
||||
rust_grpc::validator::AuditErrorRequest{
|
||||
id:submission_id,
|
||||
error_message:e.to_string(),
|
||||
}
|
||||
).await.map_err(Error::ApiActionSubmissionUploaded)?;
|
||||
|
||||
// note the asset id of the created model for later release, and mark the submission as uploaded
|
||||
self.submissions.set_status_uploaded(rust_grpc::validator::StatusUploadedRequest{
|
||||
id:upload_info.SubmissionID,
|
||||
uploaded_asset_id:upload_response.AssetId,
|
||||
}).await.map_err(Error::ApiActionSubmissionUploaded)?;
|
||||
// update the submission model status to accepted
|
||||
self.submissions.set_status_not_uploaded(rust_grpc::validator::SubmissionId{
|
||||
id:submission_id,
|
||||
}).await.map_err(Error::ApiActionSubmissionUploaded)?;
|
||||
},
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use crate::nats_types::ValidateMapfixRequest;
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
ApiActionMapfixValidate(tonic::Status),
|
||||
@@ -37,7 +37,7 @@ impl crate::message_handler::MessageHandler{
|
||||
).await.map_err(Error::ApiActionMapfixValidate)?;
|
||||
|
||||
// update the mapfix model status to accepted
|
||||
self.mapfixes.set_status_failed(rust_grpc::validator::MapfixId{
|
||||
self.mapfixes.set_status_not_validated(rust_grpc::validator::MapfixId{
|
||||
id:mapfix_id,
|
||||
}).await.map_err(Error::ApiActionMapfixValidate)?;
|
||||
},
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use crate::nats_types::ValidateSubmissionRequest;
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
ApiActionSubmissionValidate(tonic::Status),
|
||||
@@ -37,7 +37,7 @@ impl crate::message_handler::MessageHandler{
|
||||
).await.map_err(Error::ApiActionSubmissionValidate)?;
|
||||
|
||||
// update the submission model status to accepted
|
||||
self.submissions.set_status_failed(rust_grpc::validator::SubmissionId{
|
||||
self.submissions.set_status_not_validated(rust_grpc::validator::SubmissionId{
|
||||
id:submission_id,
|
||||
}).await.map_err(Error::ApiActionSubmissionValidate)?;
|
||||
},
|
||||
|
||||
@@ -17,7 +17,7 @@ fn hash_source(source:&str)->u64{
|
||||
std::hash::Hasher::finish(&hasher)
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[expect(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub enum Error{
|
||||
ModelInfoDownload(rbx_asset::cloud::GetError),
|
||||
@@ -52,7 +52,7 @@ impl std::fmt::Display for Error{
|
||||
}
|
||||
impl std::error::Error for Error{}
|
||||
|
||||
#[allow(nonstandard_style)]
|
||||
#[expect(nonstandard_style)]
|
||||
pub struct ValidateRequest{
|
||||
pub ModelID:u64,
|
||||
pub ModelVersion:u64,
|
||||
@@ -318,7 +318,7 @@ fn get_partial_path(dom:&rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance)
|
||||
}
|
||||
|
||||
fn get_script_refs(dom:&rbx_dom_weak::WeakDom)->Vec<rbx_dom_weak::types::Ref>{
|
||||
let db=rbx_reflection_database::get();
|
||||
let db=rbx_reflection_database::get().unwrap();
|
||||
let superclass=&db.classes["LuaSourceContainer"];
|
||||
dom.descendants().filter_map(|inst|{
|
||||
let class=db.classes.get(inst.class.as_str())?;
|
||||
|
||||
34
web/.gitignore
vendored
34
web/.gitignore
vendored
@@ -1,24 +1,12 @@
|
||||
bun.lockb
|
||||
|
||||
# dependencies
|
||||
/node_modules
|
||||
/.pnp
|
||||
.pnp.*
|
||||
.yarn/*
|
||||
!.yarn/patches
|
||||
!.yarn/plugins
|
||||
!.yarn/releases
|
||||
!.yarn/versions
|
||||
|
||||
# testing
|
||||
/coverage
|
||||
|
||||
# next.js
|
||||
/.next/
|
||||
/out/
|
||||
|
||||
# production
|
||||
/build
|
||||
/dist
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
@@ -29,12 +17,22 @@ npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
|
||||
# env files (can opt-in for committing if needed)
|
||||
# env files
|
||||
.env*
|
||||
|
||||
# vercel
|
||||
.vercel
|
||||
.env.local
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
|
||||
# typescript
|
||||
*.tsbuildinfo
|
||||
next-env.d.ts
|
||||
|
||||
# editor directories and files
|
||||
.vscode/*
|
||||
!.vscode/extensions.json
|
||||
.idea
|
||||
*.suo
|
||||
*.ntvs*
|
||||
*.njsproj
|
||||
*.sln
|
||||
*.sw?
|
||||
|
||||
@@ -1,13 +1,29 @@
|
||||
FROM registry.itzana.me/docker-proxy/oven/bun:1.2.8
|
||||
# Build stage
|
||||
FROM registry.itzana.me/docker-proxy/oven/bun:1.3.3 AS builder
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY package.json bun.lockb* ./
|
||||
RUN bun install --frozen-lockfile
|
||||
|
||||
COPY . .
|
||||
RUN bun run build
|
||||
|
||||
# Release
|
||||
FROM registry.itzana.me/docker-proxy/nginx:alpine
|
||||
|
||||
COPY --from=builder /app/build /usr/share/nginx/html
|
||||
|
||||
# Add nginx configuration for SPA routing
|
||||
RUN echo 'server { \
|
||||
listen 3000; \
|
||||
location / { \
|
||||
root /usr/share/nginx/html; \
|
||||
index index.html; \
|
||||
try_files $uri $uri/ /index.html; \
|
||||
} \
|
||||
}' > /etc/nginx/conf.d/default.conf
|
||||
|
||||
EXPOSE 3000
|
||||
|
||||
ENV NEXT_TELEMETRY_DISABLED=1
|
||||
|
||||
RUN bun install
|
||||
RUN bun run build
|
||||
ENTRYPOINT ["bun", "run", "start"]
|
||||
CMD ["nginx", "-g", "daemon off;"]
|
||||
|
||||
756
web/bun.lock
756
web/bun.lock
File diff suppressed because it is too large
Load Diff
13
web/index.html
Normal file
13
web/index.html
Normal file
@@ -0,0 +1,13 @@
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/x-icon" href="/favicon.ico" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>Maps Service</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
<script type="module" src="/src/main.tsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,16 +0,0 @@
|
||||
import type { NextConfig } from "next";
|
||||
|
||||
const nextConfig: NextConfig = {
|
||||
distDir: "build",
|
||||
output: "standalone",
|
||||
images: {
|
||||
remotePatterns: [
|
||||
{
|
||||
protocol: "https",
|
||||
hostname: "**.rbxcdn.com",
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
export default nextConfig;
|
||||
4142
web/package-lock.json
generated
Normal file
4142
web/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user