home committed
Commit 36f9974 · Parent: a7dc7a7

add curl examples

Files changed (3)
  1. curl_example.md +15 -0
  2. defake/blipmodels/med.py +20 -0
  3. defake/test_api.py +1 -1
curl_example.md ADDED
@@ -0,0 +1,15 @@
+
+API Usage (based on cURL)
+```
+EVENT_ID=$(curl -s -X POST https://jony7chu-cispa-citizen-defake.hf.space/gradio_api/call/detect_api \
+  -H "Content-Type: application/json" \
+  -d '{
+    "data": [
+      {"path": "https://raw.githubusercontent.com/gradio-app/gradio/main/test/test_files/bus.png", "meta": {"_type": "gradio.FileData"}},
+      "API_KEY"
+    ]
+  }' | grep -o '"event_id":"[^"]*"' | cut -d'"' -f4) && \
+curl -N "https://jony7chu-cispa-citizen-defake.hf.space/gradio_api/call/detect_api/$EVENT_ID"
+```
+
+
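The added example uses Gradio's two-step REST pattern: the first curl POSTs the inputs and captures the returned event_id, and the second curl streams the result for that event. For Python callers, the same call can go through the official gradio_client package; the sketch below is an untested equivalent, where the Space id jony7chu/cispa-citizen-defake is inferred from the hf.space subdomain and "API_KEY" remains a placeholder.

```python
# Hedged sketch (not part of the commit): a Python equivalent of the curl
# pair above, via the official gradio_client package.
from gradio_client import Client, handle_file

# Space id inferred from the subdomain jony7chu-cispa-citizen-defake.hf.space;
# the exact user/space split is an assumption.
client = Client("jony7chu/cispa-citizen-defake")

# handle_file builds the gradio.FileData payload that the raw request
# writes by hand; "API_KEY" is the same placeholder as in the curl call.
result = client.predict(
    handle_file("https://raw.githubusercontent.com/gradio-app/gradio/main/test/test_files/bus.png"),
    "API_KEY",
    api_name="/detect_api",
)
print(result)
```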
defake/blipmodels/med.py CHANGED
@@ -947,6 +947,26 @@ class BertLMHeadModel(BertPreTrainedModel):
             "encoder_attention_mask": model_kwargs.get("encoder_attention_mask", None),
             "is_decoder": True,
         }
+
+    # Fix the transformer conflicts (not working)
+    # def prepare_inputs_for_generation(self, input_ids, past=None, attention_mask=None, encoder_hidden_states=None, encoder_attention_mask=None, **model_kwargs):
+    #     input_shape = input_ids.shape
+    #     # if model is used as a decoder in encoder-decoder model, the decoder attention mask is created on the fly
+    #     if attention_mask is None:
+    #         attention_mask = input_ids.new_ones(input_shape)
+
+    #     # cut decoder_input_ids if past is used
+    #     if past is not None:
+    #         input_ids = input_ids[:, -1:]
+
+    #     return {
+    #         "input_ids": input_ids,
+    #         "attention_mask": attention_mask,
+    #         "past_key_values": past,
+    #         "encoder_hidden_states": encoder_hidden_states,
+    #         "encoder_attention_mask": encoder_attention_mask,
+    #         "is_decoder": True,
+    #     }
 
     def _reorder_cache(self, past, beam_idx):
         reordered_past = ()
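The override is committed commented out and marked "not working". A common source of this kind of conflict is that newer transformers releases renamed the cache argument of prepare_inputs_for_generation from past to past_key_values, so the old signature never receives the cache. Purely as a hedged sketch, assuming that rename is the only incompatibility (untested against this repository):

```python
# Hedged sketch, not from the commit: the commented-out override rewritten
# for transformers versions that pass the cache as `past_key_values`.
def prepare_inputs_for_generation(
    self,
    input_ids,
    past_key_values=None,
    attention_mask=None,
    encoder_hidden_states=None,
    encoder_attention_mask=None,
    **model_kwargs,
):
    # When the model is used as a decoder, build the attention mask on the fly.
    if attention_mask is None:
        attention_mask = input_ids.new_ones(input_ids.shape)

    # Once a cache exists, only the newest token needs to be fed forward.
    if past_key_values is not None:
        input_ids = input_ids[:, -1:]

    return {
        "input_ids": input_ids,
        "attention_mask": attention_mask,
        "past_key_values": past_key_values,
        "encoder_hidden_states": encoder_hidden_states,
        "encoder_attention_mask": encoder_attention_mask,
        "is_decoder": True,
    }
```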
defake/test_api.py CHANGED
@@ -6,7 +6,7 @@ import torch.nn.functional as F
 import torch.nn as nn
 import argparse
 
-from blipmodels import blip_decoder
+from .blipmodels import blip_decoder
 
 class NeuralNet(nn.Module):
     def __init__(self, input_size, hidden_size_list, num_classes):
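The one-line change to test_api.py turns an absolute import into a relative one. With `from .blipmodels import blip_decoder`, Python resolves blipmodels only when defake is imported as a package, so the script presumably now has to be launched as a module (e.g. `python -m defake.test_api` from the repository root) rather than executed directly.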