image_generator_openai.rs

use dioxus::prelude::*;
use serde::{Deserialize, Serialize};
use serde_json::json;

fn main() {
    launch(app);
}
fn app() -> Element {
    // User-supplied state: API key, prompt text, and the number of images to request.
    let mut api = use_signal(|| "".to_string());
    let mut prompt = use_signal(|| "".to_string());
    let mut n_image = use_signal(|| 1.to_string());
    let mut image = use_signal(|| ImageResponse {
        created: 0,
        data: Vec::new(),
    });
    // Bulma modifier class; set to "is-loading" while a request is in flight.
    let mut loading = use_signal(|| "".to_string());

    // Runs once on mount (returning early while the inputs are empty) and again
    // whenever the button below calls `restart()`. `peek` reads the signals without
    // subscribing, so typing in the inputs does not re-trigger the request.
    let mut generate_images = use_resource(move || async move {
        let api_key = api.peek().clone();
        let prompt = prompt.peek().clone();
        let number_of_images = n_image.peek().clone();
        if api_key.is_empty() || prompt.is_empty() || number_of_images.is_empty() {
            return;
        }
        loading.set("is-loading".to_string());
        match request(api_key, prompt, number_of_images).await {
            Ok(imgz) => image.set(imgz),
            Err(e) => println!("Error: {:?}", e),
        }
        loading.set("".to_string());
    });
    rsx! {
        head {
            link {
                rel: "stylesheet",
                href: "https://unpkg.com/bulma@0.9.0/css/bulma.min.css",
            }
        }
        div { class: "container",
            div { class: "columns",
                div { class: "column",
                    // OpenAI API key.
                    input { class: "input is-primary mt-4",
                        value: "{api}",
                        r#type: "text",
                        placeholder: "API key",
                        oninput: move |evt| {
                            api.set(evt.value().clone());
                        },
                    }
                    // Prompt text; the endpoint accepts up to 1000 characters.
                    input { class: "input is-primary mt-4",
                        placeholder: "Prompt (max 1000 characters)",
                        r#type: "text",
                        value: "{prompt}",
                        oninput: move |evt| {
                            prompt.set(evt.value().clone());
                        },
                    }
                    // Number of images to generate (the API allows 1-10).
                    input { class: "input is-primary mt-4",
                        r#type: "number",
                        min: "1",
                        max: "10",
                        value: "{n_image}",
                        oninput: move |evt| {
                            n_image.set(evt.value().clone());
                        },
                    }
                }
            }
            button { class: "button is-primary {loading}",
                onclick: move |_| {
                    generate_images.restart();
                },
                "Generate image"
            }
            br {}
        }
        // Render one centered figure per returned image URL.
        {image.read().data.iter().map(|image| {
            rsx!(
                section { class: "is-flex",
                    div { class: "container is-fluid",
                        div { class: "container has-text-centered",
                            div { class: "is-justify-content-center",
                                div { class: "level",
                                    div { class: "level-item",
                                        figure { class: "image",
                                            img {
                                                alt: "",
                                                src: "{image.url}",
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            )
        })}
    }
}
// Calls OpenAI's image generation endpoint and deserializes the JSON response.
// Network, HTTP-status, and deserialization errors are all propagated with `?`
// instead of panicking inside the async task.
async fn request(api: String, prompt: String, n_image: String) -> Result<ImageResponse, Box<dyn std::error::Error + Send + Sync>> {
    let client = reqwest::Client::new();
    // Request body; without a "model" field the endpoint uses its default model.
    let body = json!({
        "prompt": prompt,
        "n": n_image.parse::<i32>().unwrap_or(1),
        "size": "1024x1024",
    });
    let mut authorization = "Bearer ".to_string();
    authorization.push_str(&api);
    let res = client
        .post("https://api.openai.com/v1/images/generations")
        .body(body.to_string())
        .header("Content-Type", "application/json")
        .header("Authorization", authorization)
        .send()
        .await?
        .error_for_status()?
        .text()
        .await?;
    let deserialized: ImageResponse = serde_json::from_str(&res)?;
    Ok(deserialized)
}
// One generated image; the API returns each as a URL.
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
struct UrlImage {
    url: String,
}

// Top-level response from the images endpoint.
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
struct ImageResponse {
    // Unix timestamp of when the images were created.
    created: i64,
    data: Vec<UrlImage>,
}
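
// A minimal Cargo.toml this example assumes (crate versions and the "desktop"
// feature are illustrative sketches, not pinned requirements; adjust them to
// your own Dioxus setup):
//
// [dependencies]
// dioxus = { version = "0.5", features = ["desktop"] }
// serde = { version = "1", features = ["derive"] }
// serde_json = "1"
// reqwest = "0.12"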