
Commit

notebook for recoloring
CCareaga committed Jan 4, 2024
1 parent 8f95cfd commit 6b2b4c2
Showing 1 changed file with 57 additions and 40 deletions.
97 changes: 57 additions & 40 deletions recoloring.ipynb
@@ -5,6 +5,7 @@
"colab": {
"provenance": [],
"gpuType": "T4",
"authorship_tag": "ABX9TyPD3pXRpOuigYiFeN4QIF32",
"include_colab_link": true
},
"kernelspec": {
@@ -27,27 +28,16 @@
"<a href=\"https://colab.research.google.com/github/compphoto/Intrinsic/blob/main/recoloring.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
"cell_type": "markdown",
"source": [
"Make sure to set the runtime to GPU: Runtime -> Change runtime type -> T4 GPU\n",
"\n",
"You can upload your own images, then change the relevant code cells to load it and send it through the model."
],
"metadata": {
"id": "-lKPW1wZEulh"
}
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "_cPkUxN-ikld"
"id": "nnOA8b8E3Y6U"
},
"outputs": [],
"source": [
"# install the intrinsic decomposition repo from github\n",
"!pip install https://github.com/compphoto/Intrinsic/archive/main.zip"
"!git clone https://github.com/compphoto/Intrinsic\n",
"!cd Intrinsic/ && pip install ."
]
},
{
@@ -56,95 +46,122 @@
"import torch\n",
"\n",
"# import some helper functions from chrislib (will be installed by the intrinsic repo)\n",
"from chrislib.general import show, view, uninvert\n",
"from chrislib.general import show, view, uninvert, match_scale\n",
"from chrislib.data_util import load_image\n",
"\n",
"# import model loading and running the pipeline\n",
"from intrinsic.pipeline import run_pipeline\n",
"from intrinsic.model_util import load_models"
],
"metadata": {
"id": "J0gn82ZSjomn"
"id": "mS9AFGEj3jhc"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"source": [
"# download the pretrained weights and return the model (may take a bit to download weights)\n",
"intrinsic_model = load_models('paper_weights')"
],
"metadata": {
"id": "Ap3HubpwC_KG"
"id": "nw0poq363mqy"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"source": [
"# load an example image from the github repo\n",
"torch.hub.download_url_to_file('https://raw.githubusercontent.com/compphoto/Intrinsic/main/figures/avocado.png', 'avo.png')"
"# three different example scenes from the paper\n",
"scene_name = 'yellow_chair'\n",
"# scene_name = 'brown_chairs'\n",
"# scene_name = 'spain_museum'"
],
"metadata": {
"id": "m_NYfDx0AhTw"
"id": "l5YBpZ3a5rfS"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"source": [
"# load the image to run through the pipeline\n",
"img = load_image('/content/avo.png')"
"inp = load_image(f'Intrinsic/examples/{scene_name}/input.png')[:, :, :3]\n",
"msk = load_image(f'Intrinsic/examples/{scene_name}/mask.png')[:, :, :3]\n",
"tex = load_image(f'Intrinsic/examples/{scene_name}/texture.png')[:, :, :3] ** 2.2"
],
"metadata": {
"id": "ALb4Pjfvj-MU"
"id": "zmWeGYSP77W4"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"source": [
"# run the image through the pipeline (use R0 resizing dicussed in the paper)\n",
"result = run_pipeline(\n",
"results = run_pipeline(\n",
" intrinsic_model,\n",
" img,\n",
" resize_conf=0.0,\n",
" maintain_size=True,\n",
" linear=False,\n",
" device='cuda'\n",
")"
" inp,\n",
" resize_conf=None,\n",
" maintain_size=True\n",
")\n",
"\n",
"alb = results['albedo']\n",
"image = results['image']\n",
"inv_shd = results['inv_shading']\n",
"\n",
"shd = uninvert(inv_shd)[:, :, None]"
],
"metadata": {
"id": "QW0TiFypkOj-"
"id": "pDr4Wh7M5pIE"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"source": [
"# convert the inverse shading to regular shading for visualization\n",
"shd = uninvert(result['inv_shading'])\n",
"alb = result['albedo']"
"def perform_recolor(msk, alb, shd, shd_power=1.0, recolor=None):\n",
" # this function will perform the illumination-aware recoloring, or apply a shading curve\n",
" # msk - numpy array (HxWx1) denoting the region to perform the edit\n",
" # alb - linear albedo of the image\n",
" # shd - linear shading of the image\n",
" # shd_power - exponent to apply to the shading (<1 for more diffuse, >1 for more specular)\n",
" # recolor - a texture to apply to the edited region, no recoloring is performed if set to None\n",
"\n",
" if recolor is None:\n",
" our_new_alb = alb\n",
" else:\n",
" # we match the scale of the texture to the albedo in the edited region to\n",
" # ensure the appearance of the region is maintained, but this can be altered\n",
" recolor = match_scale(recolor, alb, msk.astype(bool))\n",
" our_new_alb = ((1.0 - msk) * alb) + (msk * recolor)\n",
"\n",
" # apply exponentiation to the shading of the region and composite\n",
" masked_shd = msk * (shd ** shd_power)\n",
" new_shd = ((1.0 - msk) * shd) + masked_shd\n",
"\n",
" # combine edited albedo and shading, gamma correct and clip\n",
" recolored = (our_new_alb * new_shd) ** (1/2.2)\n",
"\n",
" return recolored.clip(0, 1)"
],
"metadata": {
"id": "XpYY2MNjkp2f"
"id": "kip8Y1tE83Zi"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"source": [
"# show the result (gamma corrects the linear intrinsic components and scales to [0-1])\n",
"show([img, view(shd), view(alb)], size=(20, 7))"
"# NOTE: setting the shading exponent to >1 will make the shading appear more specular,\n",
"# but small errors in the shading (albedo leakage) will be amplified in some cases\n",
"show(perform_recolor(msk, alb, shd, 1.0, recolor=tex))"
],
"metadata": {
"id": "8KKbyoVLki9s"
"id": "oF2ljuOv84WA"
},
"execution_count": null,
"outputs": []