Delete unused files, refactoring for CR
anlausch committed Nov 19, 2019
1 parent bf155e0 commit 3f7fa79
Showing 44 changed files with 300 additions and 1,955 deletions.
10 changes: 0 additions & 10 deletions augmentation_embeddings.sh

This file was deleted.

14 changes: 0 additions & 14 deletions augmentation_embeddings_random.sh

This file was deleted.

17 changes: 17 additions & 0 deletions bam.py
@@ -0,0 +1,17 @@
import numpy as np

def debias_proc(equality_sets, vecs, vocab):
    # collect the embedding vectors for both words of every equality pair
    # (e.g. a male/female word pair) that is covered by the vocabulary
    A = []
    B = []
    for eq in equality_sets:
        if eq[0] in vocab and eq[1] in vocab:
            A.append(vecs[vocab[eq[0]]])
            B.append(vecs[vocab[eq[1]]])
    A = np.array(A)
    B = np.array(B)

    # SVD of the cross-correlation matrix between the two stimulus matrices
    product = np.matmul(A.transpose(), B)
    U, s, V = np.linalg.svd(product)
    print(U.shape, V.shape)
    proj_mat = V  # np.matmul(U, V)
    # project the full space and average it with the original vectors
    res = np.matmul(vecs, proj_mat)
    return (res + vecs) / 2, proj_mat
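
A minimal usage sketch for debias_proc; file and variable names here are hypothetical, assuming vecs is the embedding matrix and vocab the pickled word-to-row-index map used elsewhere in this repository:

import pickle
import numpy as np
from bam import debias_proc

# assumed paths, following the ./data layout used in data_handler.py
vocab = pickle.load(open("./data/word_embs/yacle/fasttext/200K/npformat/ft.wiki.en.300.vocab", "rb"))
vecs = np.load("./data/word_embs/yacle/fasttext/200K/npformat/ft.wiki.en.300.vectors.npy")  # hypothetical vectors file

# illustrative equality pairs whose representations should be aligned
equality_sets = [("man", "woman"), ("he", "she"), ("boy", "girl")]
debiased_vecs, proj_mat = debias_proc(equality_sets, vecs, vocab)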
116 changes: 0 additions & 116 deletions classifier.py

This file was deleted.

12 changes: 8 additions & 4 deletions data_handler.py
@@ -1,3 +1,7 @@
'''
Used to prepare data for input to debiasnet, e.g. preparing training constraints
'''

import codecs
from itertools import product
import pickle
@@ -31,7 +35,7 @@ def split_weat_in_train_dev(input_paths, output_train, output_dev):
    :param output_train:
    :param output_dev:
    :return:
-    >>> split_weat_in_train_dev(["./data/weat_1_aug_postspec_2_new.txt"], "./data/weat_1_aug_postspec_2_train.txt", "./data/weat_1_aug_postspec_2_dev.txt")
+    >>> split_weat_in_train_dev(["./data/weat_1_aug_postspec_2.txt"], "./data/weat_1_aug_postspec_2_train.txt", "./data/weat_1_aug_postspec_2_dev.txt")
    """
    t1, t2, a1, a2 = fuse_stimuli(input_paths)
    weat_dict = {}
@@ -50,7 +54,7 @@ def split_weat_in_train_dev(input_paths, output_train, output_dev):



-def filter_vocabulary(term_list, vocab_path="/work/gglavas/data/word_embs/yacle/fasttext/200K/npformat/ft.wiki.en.300.vocab"):
+def filter_vocabulary(term_list, vocab_path="./data/word_embs/yacle/fasttext/200K/npformat/ft.wiki.en.300.vocab"):
    vocab = pickle.load(open(vocab_path, "rb"))
    new_term_list = []
    for t in term_list:
@@ -62,7 +66,7 @@ def filter_vocabulary(term_list, vocab_path="/work/gglavas/data/word_embs/yacle/



def prepare_input_examples(input_paths, output_path, random_attributes=False,
-                           sampling_vocab_path="/work/gglavas/data/word_embs/yacle/fasttext/200K/npformat/ft.wiki.en.300.vocab",
+                           sampling_vocab_path="./data/word_embs/yacle/fasttext/200K/npformat/ft.wiki.en.300.vocab",
                           original_weat_path="", k=60, switch_targets_and_attributes=False):
    """
    :param paths:
@@ -89,7 +93,7 @@ def prepare_input_examples(input_paths, output_path, random_attributes=False,
        random_attributes=True, original_weat_path="./data/weat_8.txt", k=60, switch_targets_and_attributes=False)
    #>>> prepare_input_examples(["./data/weat_8_aug_postspec_4_new.txt"], "./data/weat_8_prepared_filtered_postspec_4_wo_original_random_switched.txt", \
        random_attributes=True, original_weat_path="./data/weat_8.txt", k=60, switch_targets_and_attributes=True)
-    >>> prepare_input_examples(["./data/weat_8_aug_postspec_4_new.txt"], "./data/weat_8_prepared_filtered_postspec_4_wo_original_switched.txt", switch_targets_and_attributes=True)
+    >>> prepare_input_examples(["./data/weat_8_aug_postspec_4.txt"], "./data/weat_8_prepared_filtered_postspec_4_wo_original_switched.txt", switch_targets_and_attributes=True)
    """
    t1, t2, a1, a2 = fuse_stimuli(input_paths)
    if switch_targets_and_attributes:
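From the visible fragment, filter_vocabulary presumably drops the terms missing from the pickled vocabulary. A hedged usage sketch, with hypothetical stimuli:

from data_handler import filter_vocabulary

terms = ["science", "technology", "sciencey"]  # hypothetical stimuli; the last is unlikely to be in vocabulary
filtered = filter_vocabulary(terms)  # presumably returns only the terms present in the fastText vocabulary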
7 changes: 0 additions & 7 deletions debbie.sh

This file was deleted.

15 changes: 0 additions & 15 deletions debbie_args.sh

This file was deleted.

38 changes: 0 additions & 38 deletions debbie_args_data.sh

This file was deleted.

40 changes: 0 additions & 40 deletions debbie_args_data2.sh

This file was deleted.

7 changes: 5 additions & 2 deletions model.py → debiasnet.py
@@ -1,3 +1,6 @@
"""
Full debiasnet model
"""
import tensorflow as tf
from flip_gradient import flip_gradient

@@ -24,8 +27,8 @@ def le_distance(t1, t2, asym_fact):
def hinge_loss(true_ledists, false_ledists, margin):
    return tf.reduce_sum(tf.maximum(tf.subtract(tf.constant(margin, dtype = tf.float32), tf.subtract(false_ledists, true_ledists)), 0.0))

-class DebbieModel(object): # setting the adversarial grad scale to -1 turns off the flipping of the gradient
-    def __init__(self, embs, mlp_layers, activation = tf.nn.tanh, scope = "debbie", reg_factor = 0.1, learning_rate = 0.0001, adversarial_grad_scale=1.0, i_factor=1.0, e_factor=1.0):
+class DebiasNetModel(object): # setting the adversarial grad scale to -1 turns off the flipping of the gradient
+    def __init__(self, embs, mlp_layers, activation = tf.nn.tanh, scope = "debie", reg_factor = 0.1, learning_rate = 0.0001, adversarial_grad_scale=1.0, i_factor=1.0, e_factor=1.0):
        self.embeddings = embs
        self.scope = scope

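A minimal instantiation sketch for the renamed class, under stated assumptions: embs is the pre-trained embedding matrix, and mlp_layers is assumed to be a list of hidden-layer sizes (the visible diff does not confirm its format). Per the inline comment, adversarial_grad_scale=-1 turns off the gradient flipping:

import numpy as np
import tensorflow as tf
from debiasnet import DebiasNetModel

embs = np.load("./data/word_embs/yacle/fasttext/200K/npformat/ft.wiki.en.300.vectors.npy")  # hypothetical vectors file
model = DebiasNetModel(embs,
                       mlp_layers=[300, 300],        # assumption: hidden-layer sizes of the debiasing MLP
                       activation=tf.nn.tanh,
                       reg_factor=0.1,
                       learning_rate=0.0001,
                       adversarial_grad_scale=-1.0)  # -1 disables the gradient flipping (see comment above)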
7 changes: 5 additions & 2 deletions model_direct.py → debiasnet_direct.py
@@ -1,3 +1,6 @@
"""
Debiasnet model with direct implicit debiasing training objective (not reported in the paper)
"""
import tensorflow as tf
from flip_gradient import flip_gradient

@@ -24,8 +27,8 @@ def le_distance(t1, t2, asym_fact):
def hinge_loss(true_ledists, false_ledists, margin):
    return tf.reduce_sum(tf.maximum(tf.subtract(tf.constant(margin, dtype = tf.float32), tf.subtract(false_ledists, true_ledists)), 0.0))

-class DebbieModel(object): # setting the adversarial grad scale to -1 turns off the flipping of the gradient
-    def __init__(self, embs, mlp_layers, activation = tf.nn.tanh, scope = "debbie", reg_factor = 0.1, learning_rate = 0.0001, adversarial_grad_scale=1.0, i_factor=1.0, e_factor=1.0):
+class DebiasNetModel(object): # setting the adversarial grad scale to -1 turns off the flipping of the gradient
+    def __init__(self, embs, mlp_layers, activation = tf.nn.tanh, scope = "debie", reg_factor = 0.1, learning_rate = 0.0001, adversarial_grad_scale=1.0, i_factor=1.0, e_factor=1.0):
        self.embeddings = embs
        self.scope = scope

