image_generator_openai.rs

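// A small Dioxus desktop app that sends a prompt to OpenAI's image generation
// endpoint (POST /v1/images/generations) and renders the returned image URLs.
// Assumed crate dependencies, inferred from the imports below: dioxus (with the
// desktop feature enabled), reqwest, serde, and serde_json.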
use dioxus::prelude::*;
use serde::{Deserialize, Serialize};
use serde_json::json;

fn main() {
    dioxus::launch(app);
}
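
// Root component: three inputs (OpenAI API key, prompt, number of images), a
// button that restarts the `generate_images` resource, and the returned images.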
fn app() -> Element {
    let mut loading = use_signal(|| "".to_string());
    let mut api = use_signal(|| "".to_string());
    let mut prompt = use_signal(|| "".to_string());
    let mut n_image = use_signal(|| 1.to_string());
    let mut image = use_signal(|| ImageResponse {
        created: 0,
        data: Vec::new(),
    });
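
    // The OpenAI call lives in a resource. `peek()` reads the signals without
    // subscribing, so typing in the inputs does not re-run the future: it runs
    // once on mount (returning early while any field is empty) and again each
    // time the button below calls `restart()`.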
    let mut generate_images = use_resource(move || async move {
        let api_key = api.peek().clone();
        let prompt = prompt.peek().clone();
        let number_of_images = n_image.peek().clone();
        if api_key.is_empty() || prompt.is_empty() || number_of_images.is_empty() {
            return;
        }
        loading.set("is-loading".to_string());
        match request(api_key, prompt, number_of_images).await {
            Ok(imgz) => image.set(imgz),
            Err(e) => println!("Error: {:?}", e),
        }
        loading.set("".to_string());
    });
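
    // Bulma-styled UI; the `loading` signal toggles the button's `is-loading`
    // class while a request is in flight.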
    rsx! {
        document::Link { rel: "stylesheet", href: "https://unpkg.com/bulma@0.9.0/css/bulma.min.css" }
        div { class: "container",
            div { class: "columns",
                div { class: "column",
                    input {
                        class: "input is-primary mt-4",
                        r#type: "text",
                        placeholder: "OpenAI API key",
                        value: "{api}",
                        oninput: move |evt| api.set(evt.value()),
                    }
                    input {
                        class: "input is-primary mt-4",
                        r#type: "text",
                        placeholder: "Prompt (max 1000 characters)",
                        value: "{prompt}",
                        oninput: move |evt| prompt.set(evt.value()),
                    }
                    input {
                        class: "input is-primary mt-4",
                        r#type: "number",
                        min: "1",
                        max: "10",
                        value: "{n_image}",
                        oninput: move |evt| n_image.set(evt.value()),
                    }
                }
            }
            button {
                class: "button is-primary {loading}",
                onclick: move |_| generate_images.restart(),
                "Generate image"
            }
            br {}
            for image in image.read().data.as_slice() {
                section { class: "is-flex",
                    div { class: "container is-fluid",
                        div { class: "container has-text-centered",
                            div { class: "is-justify-content-center",
                                div { class: "level",
                                    div { class: "level-item",
                                        figure { class: "image",
                                            img { alt: "", src: "{image.url}" }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
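
/// Sends the prompt to OpenAI's image generation endpoint
/// (`POST /v1/images/generations`) and deserializes the JSON response.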
async fn request(
    api: String,
    prompt: String,
    n_image: String,
) -> Result<ImageResponse, Box<dyn std::error::Error + Send + Sync>> {
    let client = reqwest::Client::new();
    let body = json!({
        "prompt": prompt,
        "n": n_image.parse::<i32>().unwrap_or(1),
        "size": "1024x1024",
    });
    let mut authorization = "Bearer ".to_string();
    authorization.push_str(&api);
    let res = client
        .post("https://api.openai.com/v1/images/generations")
        .body(body.to_string())
        .header("Content-Type", "application/json")
        .header("Authorization", authorization)
        .send()
        .await?
        // Turn non-2xx responses (e.g. a bad API key) into errors instead of
        // letting them surface later as JSON parsing failures.
        .error_for_status()?
        .text()
        .await?;
    let deserialized: ImageResponse = serde_json::from_str(&res)?;
    Ok(deserialized)
}
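
// Typed views of the fields we use from the API's JSON response; serde ignores
// any additional fields in the payload by default.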
#[derive(Serialize, Deserialize, Debug, PartialEq, Props, Clone)]
struct UrlImage {
    url: String,
}

#[derive(Serialize, Deserialize, Debug, PartialEq, Props, Clone)]
struct ImageResponse {
    // Unix timestamp of when the images were generated; i64 avoids the 2038 limit of i32.
    created: i64,
    data: Vec<UrlImage>,
}