Commit 9950fe0

Add UltraSharpV2 (thanks kim)
1 parent 54718d5 commit 9950fe0

File tree

1 file changed: +88 -0


data/models/UltraSharpV2.json

Lines changed: 88 additions & 0 deletions
@@ -0,0 +1,88 @@
{
    "name": "UltraSharpV2",
    "author": "kim2091",
    "license": "CC-BY-NC-SA-4.0",
    "tags": [
        "ai-generated",
        "anime",
        "anti-aliasing",
        "compression-removal",
        "deblur",
        "game-textures",
        "general-upscaler",
        "jpeg",
        "photo",
        "restoration",
        "text"
    ],
    "description": "Please consider supporting my work on [Ko-Fi](https://ko-fi.com/kim20913944)!\n\nThis model has been in the works for a very very very long time. I spent years creating my own private dataset just to train this model. It can handle realistic images, anime, cartoons, and quite a bit more without much hassle. It also works amazingly on illustrations and artwork. This is easily the best model I've ever trained.\n\nI hope you all like it.\n\nThe Lite model is based on the RealPLKSR architecture, rather than DAT2.\n\nAnd please consider donating! I'm going through a rough time right now & any support helps a LOT\n\n**Showcase:**\n\nFlux comparisons: <https://slow.pics/s/bsfbNpMA>\n\nImage Upscales: <https://slow.pics/s/RORQ4R0U?image-fit=contain>\n\nV1 vs V2 Upscales: <https://slow.pics/c/2HIBlzAh>",
    "date": "2025-05-23",
    "architecture": "dat",
    "size": [
        "DAT2"
    ],
    "scale": 4,
    "inputChannels": 3,
    "outputChannels": 3,
    "resources": [
        {
            "urls": [
                "https://huggingface.co/Kim2091/UltraSharpV2/resolve/main/4x-UltraSharpV2.safetensors?download=true"
            ],
            "sha256": "2b5db674aa8f3864a696dccd1d07d7cc0500fbf220f35044cd30eee9a7f971a7",
            "size": 139792588,
            "platform": "pytorch",
            "type": "safetensors"
        },
        {
            "urls": [
                "https://huggingface.co/Kim2091/UltraSharpV2/blob/main/4x-UltraSharpV2_fp32_op17.onnx"
            ],
            "sha256": "6c0201e3403745f39a9aa5273c50ed084cf6c4af5a71a4654c30252cf19bf0a5",
            "size": 51800517,
            "platform": "onnx",
            "type": "onnx"
        }
    ],
    "images": [
        {
            "type": "standalone",
            "url": "https://i.slow.pics/WIlUYVEn.webp"
        },
        {
            "type": "standalone",
            "url": "https://i.slow.pics/gn1LTyo2.webp"
        },
        {
            "type": "paired",
            "LR": "https://i.slow.pics/rYWRc0Vt.webp",
            "SR": "https://i.slow.pics/ovD5pg9L.webp",
            "caption": "lossless"
        },
        {
            "type": "paired",
            "LR": "https://i.slow.pics/EURPGH5C.webp",
            "SR": "https://i.slow.pics/QhszMnKH.webp",
            "caption": "jpeg75"
        },
        {
            "type": "paired",
            "LR": "https://i.slow.pics/jzA04YFI.webp",
            "SR": "https://i.slow.pics/3it3fgKN.webp"
        },
        {
            "type": "paired",
            "LR": "https://i.slow.pics/O7MSvzVY.webp",
            "SR": "https://i.slow.pics/DIIcP5Rz.webp",
            "caption": "jpeg85"
        },
        {
            "type": "paired",
            "LR": "https://i.slow.pics/xdMMHbxp.webp",
            "SR": "https://i.slow.pics/TDb7SUJQ.webp",
            "caption": "lossless"
        }
    ],
    "dataset": "Private dataset",
    "trainingOTF": false
}
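
Each entry in "resources" pairs a download URL with a sha256 digest and byte size. As a minimal sketch (not part of this commit), the Python snippet below shows how a consumer of this manifest could verify a downloaded checkpoint against those values; the local filename is only a hypothetical example.

# Minimal sketch, assuming the safetensors file was already downloaded
# from the URL listed in "resources". The local path is hypothetical.
import hashlib

EXPECTED_SHA256 = "2b5db674aa8f3864a696dccd1d07d7cc0500fbf220f35044cd30eee9a7f971a7"
EXPECTED_SIZE = 139792588  # bytes, as listed for the safetensors resource

def verify_checkpoint(path):
    # Hash the file in 1 MiB chunks and compare digest and size to the manifest.
    digest = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
            size += len(chunk)
    return digest.hexdigest() == EXPECTED_SHA256 and size == EXPECTED_SIZE

if __name__ == "__main__":
    print(verify_checkpoint("4x-UltraSharpV2.safetensors"))  # hypothetical local file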
